From 3962c972ce44b2805e6e5bc76ffc055838b93d39 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 2 Apr 2024 21:58:27 +0000 Subject: [PATCH 1/2] docs: Allow 14 week backup retention for Firestore daily backups PiperOrigin-RevId: 621269025 Source-Link: https://github.com/googleapis/googleapis/commit/d0ed6724c4f2868bf4c6d8e0fe9223221f0f0e50 Source-Link: https://github.com/googleapis/googleapis-gen/commit/73c87bc51e565666bf4e399b194b3d71e1340ee7 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzNjODdiYzUxZTU2NTY2NmJmNGUzOTliMTk0YjNkNzFlMTM0MGVlNyJ9 --- owl-bot-staging/firestore/v1/.coveragerc | 13 + owl-bot-staging/firestore/v1/.flake8 | 33 + owl-bot-staging/firestore/v1/MANIFEST.in | 2 + owl-bot-staging/firestore/v1/README.rst | 49 + .../firestore/v1/docs/_static/custom.css | 3 + owl-bot-staging/firestore/v1/docs/conf.py | 376 + .../v1/docs/firestore_v1/firestore.rst | 10 + .../v1/docs/firestore_v1/services_.rst | 6 + .../firestore/v1/docs/firestore_v1/types_.rst | 6 + owl-bot-staging/firestore/v1/docs/index.rst | 7 + .../firestore/v1/firestore-v1-py.tar.gz | 0 .../v1/google/cloud/firestore/__init__.py | 133 + .../google/cloud/firestore/gapic_version.py | 16 + .../v1/google/cloud/firestore/py.typed | 2 + .../v1/google/cloud/firestore_v1/__init__.py | 134 + .../cloud/firestore_v1/gapic_metadata.json | 268 + .../cloud/firestore_v1/gapic_version.py | 16 + .../v1/google/cloud/firestore_v1/py.typed | 2 + .../cloud/firestore_v1/services/__init__.py | 15 + .../services/firestore/__init__.py | 22 + .../services/firestore/async_client.py | 2200 +++ .../firestore_v1/services/firestore/client.py | 2416 +++ .../firestore_v1/services/firestore/pagers.py | 383 + .../services/firestore/transports/__init__.py | 38 + .../services/firestore/transports/base.py | 532 + .../services/firestore/transports/grpc.py | 774 + .../firestore/transports/grpc_asyncio.py | 773 + .../services/firestore/transports/rest.py | 2188 +++ 
.../cloud/firestore_v1/types/__init__.py | 140 + .../firestore_v1/types/aggregation_result.py | 60 + .../cloud/firestore_v1/types/bloom_filter.py | 110 + .../google/cloud/firestore_v1/types/common.py | 172 + .../cloud/firestore_v1/types/document.py | 288 + .../cloud/firestore_v1/types/firestore.py | 1758 ++ .../google/cloud/firestore_v1/types/query.py | 875 + .../cloud/firestore_v1/types/query_profile.py | 144 + .../google/cloud/firestore_v1/types/write.py | 509 + owl-bot-staging/firestore/v1/mypy.ini | 3 + owl-bot-staging/firestore/v1/noxfile.py | 253 + ...ted_firestore_batch_get_documents_async.py | 54 + ...ated_firestore_batch_get_documents_sync.py | 54 + ...1_generated_firestore_batch_write_async.py | 52 + ...v1_generated_firestore_batch_write_sync.py | 52 + ...rated_firestore_begin_transaction_async.py | 52 + ...erated_firestore_begin_transaction_sync.py | 52 + ...ore_v1_generated_firestore_commit_async.py | 52 + ...tore_v1_generated_firestore_commit_sync.py | 52 + ...nerated_firestore_create_document_async.py | 53 + ...enerated_firestore_create_document_sync.py | 53 + ...nerated_firestore_delete_document_async.py | 50 + ...enerated_firestore_delete_document_sync.py | 50 + ..._generated_firestore_get_document_async.py | 53 + ...1_generated_firestore_get_document_sync.py | 53 + ...ted_firestore_list_collection_ids_async.py | 53 + ...ated_firestore_list_collection_ids_sync.py | 53 + ...enerated_firestore_list_documents_async.py | 54 + ...generated_firestore_list_documents_sync.py | 54 + ...ore_v1_generated_firestore_listen_async.py | 67 + ...tore_v1_generated_firestore_listen_sync.py | 67 + ...nerated_firestore_partition_query_async.py | 53 + ...enerated_firestore_partition_query_sync.py | 53 + ...e_v1_generated_firestore_rollback_async.py | 51 + ...re_v1_generated_firestore_rollback_sync.py | 51 + ...d_firestore_run_aggregation_query_async.py | 54 + ...ed_firestore_run_aggregation_query_sync.py | 54 + ..._v1_generated_firestore_run_query_async.py | 54 + 
...e_v1_generated_firestore_run_query_sync.py | 54 + ...nerated_firestore_update_document_async.py | 51 + ...enerated_firestore_update_document_sync.py | 51 + ...tore_v1_generated_firestore_write_async.py | 63 + ...store_v1_generated_firestore_write_sync.py | 63 + .../snippet_metadata_google.firestore.v1.json | 2523 +++ .../v1/scripts/fixup_firestore_v1_keywords.py | 191 + owl-bot-staging/firestore/v1/setup.py | 93 + .../firestore/v1/testing/constraints-3.10.txt | 6 + .../firestore/v1/testing/constraints-3.11.txt | 6 + .../firestore/v1/testing/constraints-3.12.txt | 6 + .../firestore/v1/testing/constraints-3.7.txt | 10 + .../firestore/v1/testing/constraints-3.8.txt | 6 + .../firestore/v1/testing/constraints-3.9.txt | 6 + .../firestore/v1/tests/__init__.py | 16 + .../firestore/v1/tests/unit/__init__.py | 16 + .../firestore/v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/firestore_v1/__init__.py | 16 + .../unit/gapic/firestore_v1/test_firestore.py | 9455 ++++++++++ .../firestore_admin/v1/.coveragerc | 13 + owl-bot-staging/firestore_admin/v1/.flake8 | 33 + .../firestore_admin/v1/MANIFEST.in | 2 + owl-bot-staging/firestore_admin/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../firestore_admin/v1/docs/conf.py | 376 + .../firestore_admin_v1/firestore_admin.rst | 10 + .../v1/docs/firestore_admin_v1/services_.rst | 6 + .../v1/docs/firestore_admin_v1/types_.rst | 6 + .../firestore_admin/v1/docs/index.rst | 7 + .../google/cloud/firestore_admin/__init__.py | 121 + .../cloud/firestore_admin/gapic_version.py | 16 + .../v1/google/cloud/firestore_admin/py.typed | 2 + .../cloud/firestore_admin_v1/__init__.py | 122 + .../firestore_admin_v1/gapic_metadata.json | 373 + .../cloud/firestore_admin_v1/gapic_version.py | 16 + .../google/cloud/firestore_admin_v1/py.typed | 2 + .../firestore_admin_v1/services/__init__.py | 15 + .../services/firestore_admin/__init__.py | 22 + .../services/firestore_admin/async_client.py | 3202 ++++ 
.../services/firestore_admin/client.py | 3570 ++++ .../services/firestore_admin/pagers.py | 262 + .../firestore_admin/transports/__init__.py | 38 + .../firestore_admin/transports/base.py | 551 + .../firestore_admin/transports/grpc.py | 1032 ++ .../transports/grpc_asyncio.py | 1031 ++ .../firestore_admin/transports/rest.py | 3178 ++++ .../firestore_admin_v1/types/__init__.py | 128 + .../cloud/firestore_admin_v1/types/backup.py | 152 + .../firestore_admin_v1/types/database.py | 294 + .../cloud/firestore_admin_v1/types/field.py | 183 + .../types/firestore_admin.py | 815 + .../cloud/firestore_admin_v1/types/index.py | 301 + .../firestore_admin_v1/types/location.py | 38 + .../firestore_admin_v1/types/operation.py | 507 + .../firestore_admin_v1/types/schedule.py | 145 + owl-bot-staging/firestore_admin/v1/mypy.ini | 3 + owl-bot-staging/firestore_admin/v1/noxfile.py | 253 + ...tore_admin_create_backup_schedule_async.py | 52 + ...store_admin_create_backup_schedule_sync.py | 52 + ...d_firestore_admin_create_database_async.py | 57 + ...ed_firestore_admin_create_database_sync.py | 57 + ...ated_firestore_admin_create_index_async.py | 56 + ...rated_firestore_admin_create_index_sync.py | 56 + ...ted_firestore_admin_delete_backup_async.py | 50 + ...tore_admin_delete_backup_schedule_async.py | 50 + ...store_admin_delete_backup_schedule_sync.py | 50 + ...ated_firestore_admin_delete_backup_sync.py | 50 + ...d_firestore_admin_delete_database_async.py | 56 + ...ed_firestore_admin_delete_database_sync.py | 56 + ...ated_firestore_admin_delete_index_async.py | 50 + ...rated_firestore_admin_delete_index_sync.py | 50 + ..._firestore_admin_export_documents_async.py | 56 + ...d_firestore_admin_export_documents_sync.py | 56 + ...erated_firestore_admin_get_backup_async.py | 52 + ...restore_admin_get_backup_schedule_async.py | 52 + ...irestore_admin_get_backup_schedule_sync.py | 52 + ...nerated_firestore_admin_get_backup_sync.py | 52 + ...ated_firestore_admin_get_database_async.py | 52 + 
...rated_firestore_admin_get_database_sync.py | 52 + ...nerated_firestore_admin_get_field_async.py | 52 + ...enerated_firestore_admin_get_field_sync.py | 52 + ...nerated_firestore_admin_get_index_async.py | 52 + ...enerated_firestore_admin_get_index_sync.py | 52 + ..._firestore_admin_import_documents_async.py | 56 + ...d_firestore_admin_import_documents_sync.py | 56 + ...store_admin_list_backup_schedules_async.py | 52 + ...estore_admin_list_backup_schedules_sync.py | 52 + ...ated_firestore_admin_list_backups_async.py | 52 + ...rated_firestore_admin_list_backups_sync.py | 52 + ...ed_firestore_admin_list_databases_async.py | 52 + ...ted_firestore_admin_list_databases_sync.py | 52 + ...rated_firestore_admin_list_fields_async.py | 53 + ...erated_firestore_admin_list_fields_sync.py | 53 + ...ated_firestore_admin_list_indexes_async.py | 53 + ...rated_firestore_admin_list_indexes_sync.py | 53 + ..._firestore_admin_restore_database_async.py | 58 + ...d_firestore_admin_restore_database_sync.py | 58 + ...tore_admin_update_backup_schedule_async.py | 51 + ...store_admin_update_backup_schedule_sync.py | 51 + ...d_firestore_admin_update_database_async.py | 55 + ...ed_firestore_admin_update_database_sync.py | 55 + ...ated_firestore_admin_update_field_async.py | 59 + ...rated_firestore_admin_update_field_sync.py | 59 + ...et_metadata_google.firestore.admin.v1.json | 3740 ++++ .../fixup_firestore_admin_v1_keywords.py | 198 + owl-bot-staging/firestore_admin/v1/setup.py | 93 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../firestore_admin/v1/tests/__init__.py | 16 + .../firestore_admin/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/firestore_admin_v1/__init__.py | 16 + .../test_firestore_admin.py | 15150 ++++++++++++++++ 
.../firestore-bundle-py/.coveragerc | 13 + .../firestore-bundle-py/.flake8 | 33 + .../firestore-bundle-py/MANIFEST.in | 2 + .../firestore-bundle-py/README.rst | 49 + .../docs/_static/custom.css | 3 + .../docs/bundle/services_.rst | 4 + .../docs/bundle/types_.rst | 6 + .../firestore-bundle-py/docs/conf.py | 376 + .../firestore-bundle-py/docs/index.rst | 7 + .../google/cloud/bundle/__init__.py | 34 + .../google/cloud/bundle/gapic_metadata.json | 7 + .../google/cloud/bundle/gapic_version.py | 16 + .../google/cloud/bundle/py.typed | 2 + .../google/cloud/bundle/services/__init__.py | 15 + .../google/cloud/bundle/types/__init__.py | 30 + .../google/cloud/bundle/types/bundle.py | 251 + .../firestore-bundle-py/mypy.ini | 3 + .../firestore-bundle-py/noxfile.py | 253 + .../scripts/fixup_bundle_keywords.py | 175 + .../firestore-bundle-py/setup.py | 93 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../firestore-bundle-py/tests/__init__.py | 16 + .../tests/unit/__init__.py | 16 + .../tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/bundle/__init__.py | 16 + 213 files changed, 68921 insertions(+) create mode 100644 owl-bot-staging/firestore/v1/.coveragerc create mode 100644 owl-bot-staging/firestore/v1/.flake8 create mode 100644 owl-bot-staging/firestore/v1/MANIFEST.in create mode 100644 owl-bot-staging/firestore/v1/README.rst create mode 100644 owl-bot-staging/firestore/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/firestore/v1/docs/conf.py create mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst create mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst create mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst create mode 100644 owl-bot-staging/firestore/v1/docs/index.rst create mode 100644 
owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py create mode 100644 
owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py create mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py create mode 100644 owl-bot-staging/firestore/v1/mypy.ini create mode 100644 owl-bot-staging/firestore/v1/noxfile.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py create mode 100644 
owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py create mode 100644 
owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py create mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json create mode 100644 owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py create mode 100644 owl-bot-staging/firestore/v1/setup.py create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/firestore/v1/tests/__init__.py create mode 100644 owl-bot-staging/firestore/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py create mode 100644 
owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py create mode 100644 owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py create mode 100644 owl-bot-staging/firestore_admin/v1/.coveragerc create mode 100644 owl-bot-staging/firestore_admin/v1/.flake8 create mode 100644 owl-bot-staging/firestore_admin/v1/MANIFEST.in create mode 100644 owl-bot-staging/firestore_admin/v1/README.rst create mode 100644 owl-bot-staging/firestore_admin/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/firestore_admin/v1/docs/conf.py create mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst create mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst create mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst create mode 100644 owl-bot-staging/firestore_admin/v1/docs/index.rst create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py create mode 100644 
owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py create mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py create mode 100644 owl-bot-staging/firestore_admin/v1/mypy.ini create mode 100644 owl-bot-staging/firestore_admin/v1/noxfile.py create mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py create mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py create mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py create 
mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py create mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json create mode 100644 owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py create mode 100644 owl-bot-staging/firestore_admin/v1/setup.py create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/firestore_admin/v1/tests/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py create mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py create mode 100644 
owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt create mode 100644 
owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py diff --git a/owl-bot-staging/firestore/v1/.coveragerc b/owl-bot-staging/firestore/v1/.coveragerc new file mode 100644 index 0000000000..4c355f6455 --- /dev/null +++ b/owl-bot-staging/firestore/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/firestore/__init__.py + google/cloud/firestore/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/firestore/v1/.flake8 b/owl-bot-staging/firestore/v1/.flake8 new file mode 100644 index 0000000000..29227d4cf4 --- /dev/null +++ b/owl-bot-staging/firestore/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/firestore/v1/MANIFEST.in b/owl-bot-staging/firestore/v1/MANIFEST.in new file mode 100644 index 0000000000..f51407a0a0 --- /dev/null +++ b/owl-bot-staging/firestore/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/firestore *.py +recursive-include google/cloud/firestore_v1 *.py diff --git a/owl-bot-staging/firestore/v1/README.rst b/owl-bot-staging/firestore/v1/README.rst new file mode 100644 index 0000000000..c132117e8d --- /dev/null +++ b/owl-bot-staging/firestore/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Firestore API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Firestore API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore/v1/docs/_static/custom.css b/owl-bot-staging/firestore/v1/docs/_static/custom.css new file mode 100644 index 0000000000..06423be0b5 --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/firestore/v1/docs/conf.py b/owl-bot-staging/firestore/v1/docs/conf.py new file mode 100644 index 0000000000..7eae2df026 --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +# google-cloud-firestore documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. 
+project = u"google-cloud-firestore" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-firestore-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-firestore.tex", + u"google-cloud-firestore Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-firestore", + u"Google Cloud Firestore Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-firestore", + u"google-cloud-firestore Documentation", + author, + "google-cloud-firestore", + "GAPIC library for Google Cloud Firestore API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst new file mode 100644 index 0000000000..c32652de6b --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst @@ -0,0 +1,10 @@ +Firestore +--------------------------- + +.. automodule:: google.cloud.firestore_v1.services.firestore + :members: + :inherited-members: + +.. automodule:: google.cloud.firestore_v1.services.firestore.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst new file mode 100644 index 0000000000..f48b25d8cf --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Firestore v1 API +========================================== +.. 
toctree:: + :maxdepth: 2 + + firestore diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst new file mode 100644 index 0000000000..1cc2e75c73 --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Firestore v1 API +======================================= + +.. automodule:: google.cloud.firestore_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/firestore/v1/docs/index.rst b/owl-bot-staging/firestore/v1/docs/index.rst new file mode 100644 index 0000000000..3c5e2cb410 --- /dev/null +++ b/owl-bot-staging/firestore/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + firestore_v1/services + firestore_v1/types diff --git a/owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz b/owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py new file mode 100644 index 0000000000..447c27098e --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.firestore import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.firestore_v1.services.firestore.client import FirestoreClient +from google.cloud.firestore_v1.services.firestore.async_client import FirestoreAsyncClient + +from google.cloud.firestore_v1.types.aggregation_result import AggregationResult +from google.cloud.firestore_v1.types.bloom_filter import BitSequence +from google.cloud.firestore_v1.types.bloom_filter import BloomFilter +from google.cloud.firestore_v1.types.common import DocumentMask +from google.cloud.firestore_v1.types.common import Precondition +from google.cloud.firestore_v1.types.common import TransactionOptions +from google.cloud.firestore_v1.types.document import ArrayValue +from google.cloud.firestore_v1.types.document import Document +from google.cloud.firestore_v1.types.document import MapValue +from google.cloud.firestore_v1.types.document import Value +from google.cloud.firestore_v1.types.firestore import BatchGetDocumentsRequest +from google.cloud.firestore_v1.types.firestore import BatchGetDocumentsResponse +from google.cloud.firestore_v1.types.firestore import BatchWriteRequest +from google.cloud.firestore_v1.types.firestore import BatchWriteResponse +from google.cloud.firestore_v1.types.firestore import BeginTransactionRequest +from google.cloud.firestore_v1.types.firestore import BeginTransactionResponse +from google.cloud.firestore_v1.types.firestore import CommitRequest +from google.cloud.firestore_v1.types.firestore import CommitResponse +from google.cloud.firestore_v1.types.firestore import CreateDocumentRequest +from google.cloud.firestore_v1.types.firestore import DeleteDocumentRequest +from google.cloud.firestore_v1.types.firestore import GetDocumentRequest +from google.cloud.firestore_v1.types.firestore import ListCollectionIdsRequest +from google.cloud.firestore_v1.types.firestore import ListCollectionIdsResponse +from 
google.cloud.firestore_v1.types.firestore import ListDocumentsRequest +from google.cloud.firestore_v1.types.firestore import ListDocumentsResponse +from google.cloud.firestore_v1.types.firestore import ListenRequest +from google.cloud.firestore_v1.types.firestore import ListenResponse +from google.cloud.firestore_v1.types.firestore import PartitionQueryRequest +from google.cloud.firestore_v1.types.firestore import PartitionQueryResponse +from google.cloud.firestore_v1.types.firestore import RollbackRequest +from google.cloud.firestore_v1.types.firestore import RunAggregationQueryRequest +from google.cloud.firestore_v1.types.firestore import RunAggregationQueryResponse +from google.cloud.firestore_v1.types.firestore import RunQueryRequest +from google.cloud.firestore_v1.types.firestore import RunQueryResponse +from google.cloud.firestore_v1.types.firestore import Target +from google.cloud.firestore_v1.types.firestore import TargetChange +from google.cloud.firestore_v1.types.firestore import UpdateDocumentRequest +from google.cloud.firestore_v1.types.firestore import WriteRequest +from google.cloud.firestore_v1.types.firestore import WriteResponse +from google.cloud.firestore_v1.types.query import Cursor +from google.cloud.firestore_v1.types.query import StructuredAggregationQuery +from google.cloud.firestore_v1.types.query import StructuredQuery +from google.cloud.firestore_v1.types.query_profile import ExecutionStats +from google.cloud.firestore_v1.types.query_profile import ExplainMetrics +from google.cloud.firestore_v1.types.query_profile import ExplainOptions +from google.cloud.firestore_v1.types.query_profile import PlanSummary +from google.cloud.firestore_v1.types.write import DocumentChange +from google.cloud.firestore_v1.types.write import DocumentDelete +from google.cloud.firestore_v1.types.write import DocumentRemove +from google.cloud.firestore_v1.types.write import DocumentTransform +from google.cloud.firestore_v1.types.write import ExistenceFilter +from 
google.cloud.firestore_v1.types.write import Write +from google.cloud.firestore_v1.types.write import WriteResult + +__all__ = ('FirestoreClient', + 'FirestoreAsyncClient', + 'AggregationResult', + 'BitSequence', + 'BloomFilter', + 'DocumentMask', + 'Precondition', + 'TransactionOptions', + 'ArrayValue', + 'Document', + 'MapValue', + 'Value', + 'BatchGetDocumentsRequest', + 'BatchGetDocumentsResponse', + 'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'BeginTransactionResponse', + 'CommitRequest', + 'CommitResponse', + 'CreateDocumentRequest', + 'DeleteDocumentRequest', + 'GetDocumentRequest', + 'ListCollectionIdsRequest', + 'ListCollectionIdsResponse', + 'ListDocumentsRequest', + 'ListDocumentsResponse', + 'ListenRequest', + 'ListenResponse', + 'PartitionQueryRequest', + 'PartitionQueryResponse', + 'RollbackRequest', + 'RunAggregationQueryRequest', + 'RunAggregationQueryResponse', + 'RunQueryRequest', + 'RunQueryResponse', + 'Target', + 'TargetChange', + 'UpdateDocumentRequest', + 'WriteRequest', + 'WriteResponse', + 'Cursor', + 'StructuredAggregationQuery', + 'StructuredQuery', + 'ExecutionStats', + 'ExplainMetrics', + 'ExplainOptions', + 'PlanSummary', + 'DocumentChange', + 'DocumentDelete', + 'DocumentRemove', + 'DocumentTransform', + 'ExistenceFilter', + 'Write', + 'WriteResult', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py b/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py new file mode 100644 index 0000000000..558c8aab67 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed b/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed new file mode 100644 index 0000000000..35a48b3acc --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-firestore package uses inline types. diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py new file mode 100644 index 0000000000..63ce6226f5 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.firestore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.firestore import FirestoreClient +from .services.firestore import FirestoreAsyncClient + +from .types.aggregation_result import AggregationResult +from .types.bloom_filter import BitSequence +from .types.bloom_filter import BloomFilter +from .types.common import DocumentMask +from .types.common import Precondition +from .types.common import TransactionOptions +from .types.document import ArrayValue +from .types.document import Document +from .types.document import MapValue +from .types.document import Value +from .types.firestore import BatchGetDocumentsRequest +from .types.firestore import BatchGetDocumentsResponse +from .types.firestore import BatchWriteRequest +from .types.firestore import BatchWriteResponse +from .types.firestore import BeginTransactionRequest +from .types.firestore import BeginTransactionResponse +from .types.firestore import CommitRequest +from .types.firestore import CommitResponse +from .types.firestore import CreateDocumentRequest +from .types.firestore import DeleteDocumentRequest +from .types.firestore import GetDocumentRequest +from .types.firestore import ListCollectionIdsRequest +from .types.firestore import ListCollectionIdsResponse +from .types.firestore import ListDocumentsRequest +from .types.firestore import ListDocumentsResponse +from .types.firestore import ListenRequest +from .types.firestore import ListenResponse +from .types.firestore import PartitionQueryRequest +from .types.firestore import PartitionQueryResponse +from .types.firestore import RollbackRequest +from .types.firestore import RunAggregationQueryRequest +from .types.firestore import RunAggregationQueryResponse +from .types.firestore import RunQueryRequest +from .types.firestore import RunQueryResponse +from .types.firestore import Target +from .types.firestore import TargetChange +from .types.firestore import 
UpdateDocumentRequest +from .types.firestore import WriteRequest +from .types.firestore import WriteResponse +from .types.query import Cursor +from .types.query import StructuredAggregationQuery +from .types.query import StructuredQuery +from .types.query_profile import ExecutionStats +from .types.query_profile import ExplainMetrics +from .types.query_profile import ExplainOptions +from .types.query_profile import PlanSummary +from .types.write import DocumentChange +from .types.write import DocumentDelete +from .types.write import DocumentRemove +from .types.write import DocumentTransform +from .types.write import ExistenceFilter +from .types.write import Write +from .types.write import WriteResult + +__all__ = ( + 'FirestoreAsyncClient', +'AggregationResult', +'ArrayValue', +'BatchGetDocumentsRequest', +'BatchGetDocumentsResponse', +'BatchWriteRequest', +'BatchWriteResponse', +'BeginTransactionRequest', +'BeginTransactionResponse', +'BitSequence', +'BloomFilter', +'CommitRequest', +'CommitResponse', +'CreateDocumentRequest', +'Cursor', +'DeleteDocumentRequest', +'Document', +'DocumentChange', +'DocumentDelete', +'DocumentMask', +'DocumentRemove', +'DocumentTransform', +'ExecutionStats', +'ExistenceFilter', +'ExplainMetrics', +'ExplainOptions', +'FirestoreClient', +'GetDocumentRequest', +'ListCollectionIdsRequest', +'ListCollectionIdsResponse', +'ListDocumentsRequest', +'ListDocumentsResponse', +'ListenRequest', +'ListenResponse', +'MapValue', +'PartitionQueryRequest', +'PartitionQueryResponse', +'PlanSummary', +'Precondition', +'RollbackRequest', +'RunAggregationQueryRequest', +'RunAggregationQueryResponse', +'RunQueryRequest', +'RunQueryResponse', +'StructuredAggregationQuery', +'StructuredQuery', +'Target', +'TargetChange', +'TransactionOptions', +'UpdateDocumentRequest', +'Value', +'Write', +'WriteRequest', +'WriteResponse', +'WriteResult', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json 
b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json new file mode 100644 index 0000000000..d0462f9640 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json @@ -0,0 +1,268 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.firestore_v1", + "protoPackage": "google.firestore.v1", + "schema": "1.0", + "services": { + "Firestore": { + "clients": { + "grpc": { + "libraryClient": "FirestoreClient", + "rpcs": { + "BatchGetDocuments": { + "methods": [ + "batch_get_documents" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListCollectionIds": { + "methods": [ + "list_collection_ids" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "Listen": { + "methods": [ + "listen" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + }, + "Write": { + "methods": [ + "write" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FirestoreAsyncClient", + "rpcs": { + "BatchGetDocuments": { + "methods": [ + "batch_get_documents" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + 
"methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListCollectionIds": { + "methods": [ + "list_collection_ids" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "Listen": { + "methods": [ + "listen" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + }, + "Write": { + "methods": [ + "write" + ] + } + } + }, + "rest": { + "libraryClient": "FirestoreClient", + "rpcs": { + "BatchGetDocuments": { + "methods": [ + "batch_get_documents" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListCollectionIds": { + "methods": [ + "list_collection_ids" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "Listen": { + "methods": [ + "listen" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunAggregationQuery": { + "methods": [ + "run_aggregation_query" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + }, + "Write": { + "methods": [ + "write" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py new file mode 100644 index 0000000000..558c8aab67 --- /dev/null +++ 
b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed new file mode 100644 index 0000000000..35a48b3acc --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-firestore package uses inline types. diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py new file mode 100644 index 0000000000..8f6cf06824 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py new file mode 100644 index 0000000000..3a2cdd9b1a --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import FirestoreClient +from .async_client import FirestoreAsyncClient + +__all__ = ( + 'FirestoreClient', + 'FirestoreAsyncClient', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py new file mode 100644 index 0000000000..23e437047b --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py @@ -0,0 +1,2200 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union + +from google.cloud.firestore_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.firestore_v1.services.firestore import pagers +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write as gf_write +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import 
status_pb2 # type: ignore +from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport +from .client import FirestoreClient + + +class FirestoreAsyncClient: + """The Cloud Firestore service. + + Cloud Firestore is a fast, fully managed, serverless, + cloud-native NoSQL document database that simplifies storing, + syncing, and querying data for your mobile, web, and IoT apps at + global scale. Its client libraries provide live synchronization + and offline support, while its security features and + integrations with Firebase and Google Cloud Platform accelerate + building truly serverless apps. + """ + + _client: FirestoreClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = FirestoreClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FirestoreClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FirestoreClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod(FirestoreClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(FirestoreClient.parse_common_billing_account_path) + common_folder_path = staticmethod(FirestoreClient.common_folder_path) + parse_common_folder_path = staticmethod(FirestoreClient.parse_common_folder_path) + common_organization_path = staticmethod(FirestoreClient.common_organization_path) + parse_common_organization_path = staticmethod(FirestoreClient.parse_common_organization_path) + common_project_path = staticmethod(FirestoreClient.common_project_path) + parse_common_project_path = staticmethod(FirestoreClient.parse_common_project_path) + common_location_path = staticmethod(FirestoreClient.common_location_path) + parse_common_location_path = staticmethod(FirestoreClient.parse_common_location_path) + + @classmethod + def 
from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAsyncClient: The constructed client. + """ + return FirestoreClient.from_service_account_info.__func__(FirestoreAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAsyncClient: The constructed client. + """ + return FirestoreClient.from_service_account_file.__func__(FirestoreAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FirestoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FirestoreTransport: + """Returns the transport used by the client instance. + + Returns: + FirestoreTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial(type(FirestoreClient).get_transport_class, type(FirestoreClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, FirestoreTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firestore async client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.FirestoreTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FirestoreClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def get_document(self, + request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Gets a single document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_get_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.GetDocumentRequest( + transaction=b'transaction_blob', + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]]): + The request object. The request for + [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. + + """ + # Create or coerce a protobuf request object. + request = firestore.GetDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_document, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_documents(self, + request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsAsyncPager: + r"""Lists documents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_list_documents(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.ListDocumentsRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]]): + The request object. The request for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager: + The response for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = firestore.ListDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_documents, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + ("collection_id", request.collection_id), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDocumentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_document(self, + request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[gf_document.Document] = None, + update_mask: Optional[common.DocumentMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gf_document.Document: + r"""Updates or inserts a document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_update_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]]): + The request object. The request for + [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. + document (:class:`google.cloud.firestore_v1.types.Document`): + Required. The updated document. + Creates the document if it does not + already exist. + + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.cloud.firestore_v1.types.DocumentMask`): + The fields to update. + None of the field paths in the mask may + contain a reserved name. + + If the document exists on the server and + has fields not referenced in the mask, + they are left unchanged. + Fields referenced in the mask, but not + present in the input document, are + deleted from the document on the server. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.UpdateDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_document, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("document.name", request.document.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_document(self, + request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a document. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_delete_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]]): + The request object. The request for + [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. + name (:class:`str`): + Required. The resource name of the Document to delete. + In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.DeleteDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_document, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_get_documents(self, + request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[firestore.BatchGetDocumentsResponse]]: + r"""Gets multiple documents. + + Documents returned by this method are not guaranteed to + be returned in the same order that they were requested. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_batch_get_documents(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BatchGetDocumentsRequest( + transaction=b'transaction_blob', + database="database_value", + ) + + # Make the request + stream = await client.batch_get_documents(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]]): + The request object. The request for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: + The streamed response for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + + """ + # Create or coerce a protobuf request object. + request = firestore.BatchGetDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_get_documents, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def begin_transaction(self, + request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.BeginTransactionResponse: + r"""Starts a new transaction. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_begin_transaction(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BeginTransactionRequest( + database="database_value", + ) + + # Make the request + response = await client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]]): + The request object. The request for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + database (:class:`str`): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.BeginTransactionResponse: + The response for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.begin_transaction, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def commit(self, + request: Optional[Union[firestore.CommitRequest, dict]] = None, + *, + database: Optional[str] = None, + writes: Optional[MutableSequence[gf_write.Write]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.CommitResponse: + r"""Commits a transaction, while optionally updating + documents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_commit(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.CommitRequest( + database="database_value", + ) + + # Make the request + response = await client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.CommitRequest, dict]]): + The request object. The request for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + database (:class:`str`): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + writes (:class:`MutableSequence[google.cloud.firestore_v1.types.Write]`): + The writes to apply. + + Always executed atomically and in order. + + This corresponds to the ``writes`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.CommitResponse: + The response for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([database, writes]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if writes: + request.writes.extend(writes) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rollback(self, + request: Optional[Union[firestore.RollbackRequest, dict]] = None, + *, + database: Optional[str] = None, + transaction: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Rolls back a transaction. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_rollback(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RollbackRequest( + database="database_value", + transaction=b'transaction_blob', + ) + + # Make the request + await client.rollback(request=request) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.RollbackRequest, dict]]): + The request object. The request for + [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. + database (:class:`str`): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (:class:`bytes`): + Required. The transaction to roll + back. + + This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([database, transaction]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if transaction is not None: + request.transaction = transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_query(self, + request: Optional[Union[firestore.RunQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[firestore.RunQueryResponse]]: + r"""Runs a query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_run_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RunQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = await client.run_query(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]]): + The request object. The request for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.firestore_v1.types.RunQueryResponse]: + The response for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + + """ + # Create or coerce a protobuf request object. + request = firestore.RunQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_query, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_aggregation_query(self, + request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[firestore.RunAggregationQueryResponse]]: + r"""Runs an aggregation query. + + Rather than producing [Document][google.firestore.v1.Document] + results like + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], + this API allows running an aggregation to produce a series of + [AggregationResult][google.firestore.v1.AggregationResult] + server-side. + + High-Level Example: + + :: + + -- Return the number of documents in table given a filter. + SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_run_aggregation_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RunAggregationQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = await client.run_aggregation_query(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]]): + The request object. The request for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: + The response for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + + """ + # Create or coerce a protobuf request object. + request = firestore.RunAggregationQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_aggregation_query, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def partition_query(self, + request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.PartitionQueryAsyncPager: + r"""Partitions a query by returning partition cursors + that can be used to run the query in parallel. The + returned partition cursors are split points that can be + used by RunQuery as starting/end points for the query + results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_partition_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.PartitionQueryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.partition_query(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]]): + The request object. The request for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager: + The response for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = firestore.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.partition_query, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.PartitionQueryAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def write(self, + requests: Optional[AsyncIterator[firestore.WriteRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[firestore.WriteResponse]]: + r"""Streams batches of document updates and deletes, in + order. This method is only available via gRPC or + WebChannel (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_write(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.WriteRequest( + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.WriteRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.write(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.firestore_v1.types.WriteRequest`]): + The request object AsyncIterator. The request for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + The first request creates a stream, or resumes an + existing one from a token. + + When creating a new stream, the server replies with a + response containing only an ID and a token, to use in + the next request. + + When resuming a stream, the server first streams any + responses later than the given token, then a response + containing only an up-to-date token, to use in the next + request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.firestore_v1.types.WriteResponse]: + The response for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.write, + default_timeout=86400.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def listen(self, + requests: Optional[AsyncIterator[firestore.ListenRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[firestore.ListenResponse]]: + r"""Listens to changes. This method is only available via + gRPC or WebChannel (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_listen(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + add_target = firestore_v1.Target() + add_target.resume_token = b'resume_token_blob' + + request = firestore_v1.ListenRequest( + add_target=add_target, + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.ListenRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.listen(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.firestore_v1.types.ListenRequest`]): + The request object AsyncIterator. A request for + [Firestore.Listen][google.firestore.v1.Firestore.Listen] + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.firestore_v1.types.ListenResponse]: + The response for + [Firestore.Listen][google.firestore.v1.Firestore.Listen]. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.listen, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=86400.0, + ), + default_timeout=86400.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_collection_ids(self, + request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCollectionIdsAsyncPager: + r"""Lists all the collection IDs underneath a document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_list_collection_ids(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.ListCollectionIdsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collection_ids(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]]): + The request object. The request for + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + parent (:class:`str`): + Required. The parent document. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager: + The response from + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore.ListCollectionIdsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_collection_ids, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCollectionIdsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_write(self, + request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.BatchWriteResponse: + r"""Applies a batch of write operations. + + The BatchWrite method does not apply the write operations + atomically and can apply them out of order. Method does not + allow more than one write per document. Each write succeeds or + fails independently. See the + [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for + the success status of each write. + + If you require an atomically applied set of writes, use + [Commit][google.firestore.v1.Firestore.Commit] instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_batch_write(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BatchWriteRequest( + database="database_value", + ) + + # Make the request + response = await client.batch_write(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]]): + The request object. The request for + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.BatchWriteResponse: + The response from + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + + """ + # Create or coerce a protobuf request object. + request = firestore.BatchWriteRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_write, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_document(self, + request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Creates a new document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_create_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.CreateDocumentRequest( + parent="parent_value", + collection_id="collection_id_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]]): + The request object. The request for + [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. + + """ + # Create or coerce a protobuf request object. + request = firestore.CreateDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_document, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + ("collection_id", request.collection_id), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "FirestoreAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirestoreAsyncClient", +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py new file mode 100644 index 0000000000..0d3cfdfb12 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py @@ -0,0 +1,2416 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.firestore_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.firestore_v1.services.firestore import pagers +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write as gf_write +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import FirestoreGrpcTransport +from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport +from 
.transports.rest import FirestoreRestTransport + + +class FirestoreClientMeta(type): + """Metaclass for the Firestore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] + _transport_registry["grpc"] = FirestoreGrpcTransport + _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport + _transport_registry["rest"] = FirestoreRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[FirestoreTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FirestoreClient(metaclass=FirestoreClientMeta): + """The Cloud Firestore service. + + Cloud Firestore is a fast, fully managed, serverless, + cloud-native NoSQL document database that simplifies storing, + syncing, and querying data for your mobile, web, and IoT apps at + global scale. Its client libraries provide live synchronization + and offline support, while its security features and + integrations with Firebase and Google Cloud Platform accelerate + building truly serverless apps. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. 
+ Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "firestore.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FirestoreTransport: + """Returns the transport used by the client instance. + + Returns: + FirestoreTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a 
project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = FirestoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = FirestoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = FirestoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. 
+ + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = FirestoreClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + FirestoreClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, FirestoreTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firestore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FirestoreTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = FirestoreClient._read_environment_variables() + self._client_cert_source = FirestoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = FirestoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FirestoreTransport) + if transport_provided: + # transport is a FirestoreTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(FirestoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + FirestoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_document(self, + request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Gets a single document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_get_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.GetDocumentRequest( + transaction=b'transaction_blob', + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]): + The request object. The request for + [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.GetDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.GetDocumentRequest): + request = firestore.GetDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_documents(self, + request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsPager: + r"""Lists documents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_list_documents(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.ListDocumentsRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]): + The request object. The request for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager: + The response for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.ListDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.ListDocumentsRequest): + request = firestore.ListDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + ("collection_id", request.collection_id), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDocumentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_document(self, + request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[gf_document.Document] = None, + update_mask: Optional[common.DocumentMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gf_document.Document: + r"""Updates or inserts a document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_update_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]): + The request object. The request for + [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. + document (google.cloud.firestore_v1.types.Document): + Required. The updated document. + Creates the document if it does not + already exist. + + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to update. + None of the field paths in the mask may + contain a reserved name. + + If the document exists on the server and + has fields not referenced in the mask, + they are left unchanged. + Fields referenced in the mask, but not + present in the input document, are + deleted from the document on the server. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. 
+ + Must not exceed 1 MiB - 4 bytes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.UpdateDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.UpdateDocumentRequest): + request = firestore.UpdateDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("document.name", request.document.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_document(self, + request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a document. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_delete_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + Args: + request (Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]): + The request object. The request for + [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. + name (str): + Required. The resource name of the Document to delete. + In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.DeleteDocumentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.DeleteDocumentRequest): + request = firestore.DeleteDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_get_documents(self, + request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[firestore.BatchGetDocumentsResponse]: + r"""Gets multiple documents. + + Documents returned by this method are not guaranteed to + be returned in the same order that they were requested. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_batch_get_documents(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BatchGetDocumentsRequest( + transaction=b'transaction_blob', + database="database_value", + ) + + # Make the request + stream = client.batch_get_documents(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]): + The request object. The request for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: + The streamed response for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.BatchGetDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.BatchGetDocumentsRequest): + request = firestore.BatchGetDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_get_documents] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def begin_transaction(self, + request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.BeginTransactionResponse: + r"""Starts a new transaction. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_begin_transaction(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BeginTransactionRequest( + database="database_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]): + The request object. The request for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.BeginTransactionResponse: + The response for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.BeginTransactionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.BeginTransactionRequest): + request = firestore.BeginTransactionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def commit(self, + request: Optional[Union[firestore.CommitRequest, dict]] = None, + *, + database: Optional[str] = None, + writes: Optional[MutableSequence[gf_write.Write]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.CommitResponse: + r"""Commits a transaction, while optionally updating + documents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_commit(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.CommitRequest( + database="database_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.CommitRequest, dict]): + The request object. The request for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + writes (MutableSequence[google.cloud.firestore_v1.types.Write]): + The writes to apply. + + Always executed atomically and in order. + + This corresponds to the ``writes`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.CommitResponse: + The response for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, writes]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.CommitRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.CommitRequest): + request = firestore.CommitRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if writes is not None: + request.writes = writes + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def rollback(self, + request: Optional[Union[firestore.RollbackRequest, dict]] = None, + *, + database: Optional[str] = None, + transaction: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Rolls back a transaction. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_rollback(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RollbackRequest( + database="database_value", + transaction=b'transaction_blob', + ) + + # Make the request + client.rollback(request=request) + + Args: + request (Union[google.cloud.firestore_v1.types.RollbackRequest, dict]): + The request object. The request for + [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction (bytes): + Required. The transaction to roll + back. + + This corresponds to the ``transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, transaction]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.RollbackRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.RollbackRequest): + request = firestore.RollbackRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if transaction is not None: + request.transaction = transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_query(self, + request: Optional[Union[firestore.RunQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[firestore.RunQueryResponse]: + r"""Runs a query. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_run_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RunQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = client.run_query(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]): + The request object. The request for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.firestore_v1.types.RunQueryResponse]: + The response for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.RunQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.RunQueryRequest): + request = firestore.RunQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.run_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_aggregation_query(self, + request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[firestore.RunAggregationQueryResponse]: + r"""Runs an aggregation query. + + Rather than producing [Document][google.firestore.v1.Document] + results like + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], + this API allows running an aggregation to produce a series of + [AggregationResult][google.firestore.v1.AggregationResult] + server-side. + + High-Level Example: + + :: + + -- Return the number of documents in table given a filter. + SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_run_aggregation_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RunAggregationQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = client.run_aggregation_query(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]): + The request object. The request for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: + The response for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.RunAggregationQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.RunAggregationQueryRequest): + request = firestore.RunAggregationQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def partition_query(self, + request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.PartitionQueryPager: + r"""Partitions a query by returning partition cursors + that can be used to run the query in parallel. The + returned partition cursors are split points that can be + used by RunQuery as starting/end points for the query + results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_partition_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.PartitionQueryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.partition_query(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]): + The request object. The request for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager: + The response for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.PartitionQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.PartitionQueryRequest): + request = firestore.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.PartitionQueryPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def write(self, + requests: Optional[Iterator[firestore.WriteRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[firestore.WriteResponse]: + r"""Streams batches of document updates and deletes, in + order. This method is only available via gRPC or + WebChannel (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_write(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.WriteRequest( + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.WriteRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.write(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.firestore_v1.types.WriteRequest]): + The request object iterator. The request for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + The first request creates a stream, or resumes an + existing one from a token. + + When creating a new stream, the server replies with a + response containing only an ID and a token, to use in + the next request. 
+ + When resuming a stream, the server first streams any + responses later than the given token, then a response + containing only an up-to-date token, to use in the next + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.firestore_v1.types.WriteResponse]: + The response for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def listen(self, + requests: Optional[Iterator[firestore.ListenRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[firestore.ListenResponse]: + r"""Listens to changes. This method is only available via + gRPC or WebChannel (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_listen(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + add_target = firestore_v1.Target() + add_target.resume_token = b'resume_token_blob' + + request = firestore_v1.ListenRequest( + add_target=add_target, + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.ListenRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.listen(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.firestore_v1.types.ListenRequest]): + The request object iterator. A request for + [Firestore.Listen][google.firestore.v1.Firestore.Listen] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.firestore_v1.types.ListenResponse]: + The response for + [Firestore.Listen][google.firestore.v1.Firestore.Listen]. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.listen] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_collection_ids(self, + request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCollectionIdsPager: + r"""Lists all the collection IDs underneath a document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_list_collection_ids(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.ListCollectionIdsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collection_ids(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]): + The request object. The request for + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + parent (str): + Required. The parent document. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager: + The response from + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore.ListCollectionIdsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.ListCollectionIdsRequest): + request = firestore.ListCollectionIdsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_collection_ids] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCollectionIdsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_write(self, + request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore.BatchWriteResponse: + r"""Applies a batch of write operations. + + The BatchWrite method does not apply the write operations + atomically and can apply them out of order. Method does not + allow more than one write per document. Each write succeeds or + fails independently. See the + [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for + the success status of each write. + + If you require an atomically applied set of writes, use + [Commit][google.firestore.v1.Firestore.Commit] instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_batch_write(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BatchWriteRequest( + database="database_value", + ) + + # Make the request + response = client.batch_write(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]): + The request object. The request for + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.BatchWriteResponse: + The response from + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore.BatchWriteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.BatchWriteRequest): + request = firestore.BatchWriteRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_document(self, + request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Creates a new document. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + def sample_create_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.CreateDocumentRequest( + parent="parent_value", + collection_id="collection_id_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]): + The request object. The request for + [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_v1.types.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a firestore.CreateDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore.CreateDocumentRequest): + request = firestore.CreateDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + ("collection_id", request.collection_id), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FirestoreClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirestoreClient", +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py new file mode 100644 index 0000000000..81b0d97435 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query + + +class ListDocumentsPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., firestore.ListDocumentsResponse], + request: firestore.ListDocumentsRequest, + response: firestore.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.ListDocumentsRequest): + The initial request object. + response (google.cloud.firestore_v1.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = firestore.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firestore.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[document.Document]: + for page in self.pages: + yield from page.documents + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDocumentsAsyncPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[firestore.ListDocumentsResponse]], + request: firestore.ListDocumentsRequest, + response: firestore.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.ListDocumentsRequest): + The initial request object. 
+ response (google.cloud.firestore_v1.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[firestore.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[document.Document]: + async def async_generator(): + async for page in self.pages: + for response in page.documents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class PartitionQueryPager: + """A pager for iterating through ``partition_query`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and + provides an ``__iter__`` method to iterate through its + ``partitions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``PartitionQuery`` requests and continue to iterate + through the ``partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., firestore.PartitionQueryResponse], + request: firestore.PartitionQueryRequest, + response: firestore.PartitionQueryResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.PartitionQueryRequest): + The initial request object. + response (google.cloud.firestore_v1.types.PartitionQueryResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore.PartitionQueryRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firestore.PartitionQueryResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[query.Cursor]: + for page in self.pages: + yield from page.partitions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class PartitionQueryAsyncPager: + """A pager for iterating through ``partition_query`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``partitions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``PartitionQuery`` requests and continue to iterate + through the ``partitions`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[firestore.PartitionQueryResponse]], + request: firestore.PartitionQueryRequest, + response: firestore.PartitionQueryResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.PartitionQueryRequest): + The initial request object. + response (google.cloud.firestore_v1.types.PartitionQueryResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore.PartitionQueryRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[firestore.PartitionQueryResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[query.Cursor]: + async def async_generator(): + async for page in self.pages: + for response in page.partitions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListCollectionIdsPager: + """A pager for iterating through ``list_collection_ids`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``collection_ids`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCollectionIds`` requests and continue to iterate + through the ``collection_ids`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., firestore.ListCollectionIdsResponse], + request: firestore.ListCollectionIdsRequest, + response: firestore.ListCollectionIdsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): + The initial request object. + response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = firestore.ListCollectionIdsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firestore.ListCollectionIdsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.collection_ids + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListCollectionIdsAsyncPager: + """A pager for iterating through ``list_collection_ids`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``collection_ids`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCollectionIds`` requests and continue to iterate + through the ``collection_ids`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[firestore.ListCollectionIdsResponse]], + request: firestore.ListCollectionIdsRequest, + response: firestore.ListCollectionIdsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): + The initial request object. 
+ response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore.ListCollectionIdsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[firestore.ListCollectionIdsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.collection_ids: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py new file mode 100644 index 0000000000..f66168756b --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FirestoreTransport +from .grpc import FirestoreGrpcTransport +from .grpc_asyncio import FirestoreGrpcAsyncIOTransport +from .rest import FirestoreRestTransport +from .rest import FirestoreRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] +_transport_registry['grpc'] = FirestoreGrpcTransport +_transport_registry['grpc_asyncio'] = FirestoreGrpcAsyncIOTransport +_transport_registry['rest'] = FirestoreRestTransport + +__all__ = ( + 'FirestoreTransport', + 'FirestoreGrpcTransport', + 'FirestoreGrpcAsyncIOTransport', + 'FirestoreRestTransport', + 'FirestoreRestInterceptor', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py new file mode 100644 index 0000000000..75dee60fd8 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.firestore_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class FirestoreTransport(abc.ABC): + """Abstract transport class for Firestore.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + ) + + DEFAULT_HOST: str = 'firestore.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_document: gapic_v1.method.wrap_method( + self.get_document, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_documents: gapic_v1.method.wrap_method( + self.list_documents, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_document: gapic_v1.method.wrap_method( + self.update_document, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_document: gapic_v1.method.wrap_method( + self.delete_document, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_get_documents: gapic_v1.method.wrap_method( + self.batch_get_documents, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method.wrap_method( + self.begin_transaction, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.commit: gapic_v1.method.wrap_method( + self.commit, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method.wrap_method( + self.rollback, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_query: gapic_v1.method.wrap_method( + self.run_query, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.run_aggregation_query: gapic_v1.method.wrap_method( + self.run_aggregation_query, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.partition_query: gapic_v1.method.wrap_method( + self.partition_query, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.write: gapic_v1.method.wrap_method( + self.write, + default_timeout=86400.0, + client_info=client_info, + ), + self.listen: gapic_v1.method.wrap_method( + self.listen, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=86400.0, + ), + default_timeout=86400.0, + client_info=client_info, + ), + self.list_collection_ids: gapic_v1.method.wrap_method( + self.list_collection_ids, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, 
+ ), + self.batch_write: gapic_v1.method.wrap_method( + self.batch_write, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_document: gapic_v1.method.wrap_method( + self.create_document, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_document(self) -> Callable[ + [firestore.GetDocumentRequest], + Union[ + document.Document, + Awaitable[document.Document] + ]]: + raise NotImplementedError() + + @property + def list_documents(self) -> Callable[ + [firestore.ListDocumentsRequest], + Union[ + firestore.ListDocumentsResponse, + Awaitable[firestore.ListDocumentsResponse] + ]]: + raise NotImplementedError() + + @property + def update_document(self) -> Callable[ + [firestore.UpdateDocumentRequest], + Union[ + gf_document.Document, + Awaitable[gf_document.Document] + ]]: + raise NotImplementedError() + + @property + def delete_document(self) -> Callable[ + [firestore.DeleteDocumentRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def batch_get_documents(self) -> Callable[ + [firestore.BatchGetDocumentsRequest], + Union[ + firestore.BatchGetDocumentsResponse, + Awaitable[firestore.BatchGetDocumentsResponse] + ]]: + raise NotImplementedError() + + @property + def 
begin_transaction(self) -> Callable[ + [firestore.BeginTransactionRequest], + Union[ + firestore.BeginTransactionResponse, + Awaitable[firestore.BeginTransactionResponse] + ]]: + raise NotImplementedError() + + @property + def commit(self) -> Callable[ + [firestore.CommitRequest], + Union[ + firestore.CommitResponse, + Awaitable[firestore.CommitResponse] + ]]: + raise NotImplementedError() + + @property + def rollback(self) -> Callable[ + [firestore.RollbackRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def run_query(self) -> Callable[ + [firestore.RunQueryRequest], + Union[ + firestore.RunQueryResponse, + Awaitable[firestore.RunQueryResponse] + ]]: + raise NotImplementedError() + + @property + def run_aggregation_query(self) -> Callable[ + [firestore.RunAggregationQueryRequest], + Union[ + firestore.RunAggregationQueryResponse, + Awaitable[firestore.RunAggregationQueryResponse] + ]]: + raise NotImplementedError() + + @property + def partition_query(self) -> Callable[ + [firestore.PartitionQueryRequest], + Union[ + firestore.PartitionQueryResponse, + Awaitable[firestore.PartitionQueryResponse] + ]]: + raise NotImplementedError() + + @property + def write(self) -> Callable[ + [firestore.WriteRequest], + Union[ + firestore.WriteResponse, + Awaitable[firestore.WriteResponse] + ]]: + raise NotImplementedError() + + @property + def listen(self) -> Callable[ + [firestore.ListenRequest], + Union[ + firestore.ListenResponse, + Awaitable[firestore.ListenResponse] + ]]: + raise NotImplementedError() + + @property + def list_collection_ids(self) -> Callable[ + [firestore.ListCollectionIdsRequest], + Union[ + firestore.ListCollectionIdsResponse, + Awaitable[firestore.ListCollectionIdsResponse] + ]]: + raise NotImplementedError() + + @property + def batch_write(self) -> Callable[ + [firestore.BatchWriteRequest], + Union[ + firestore.BatchWriteResponse, + Awaitable[firestore.BatchWriteResponse] + ]]: + raise 
NotImplementedError() + + @property + def create_document(self) -> Callable[ + [firestore.CreateDocumentRequest], + Union[ + document.Document, + Awaitable[document.Document] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'FirestoreTransport', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py new file mode 100644 index 0000000000..61d093913b --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -0,0 +1,774 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO + + +class FirestoreGrpcTransport(FirestoreTransport): + """gRPC backend transport for Firestore. + + The Cloud Firestore service. + + Cloud Firestore is a fast, fully managed, serverless, + cloud-native NoSQL document database that simplifies storing, + syncing, and querying data for your mobile, web, and IoT apps at + global scale. Its client libraries provide live synchronization + and offline support, while its security features and + integrations with Firebase and Google Cloud Platform accelerate + building truly serverless apps. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'firestore.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[grpc.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'firestore.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated.
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.

        The channel is created in ``__init__``; this accessor never creates
        a new one.
        """
        return self._grpc_channel

    @property
    def get_document(self) -> Callable[
            [firestore.GetDocumentRequest],
            document.Document]:
        r"""Return a callable for the get document method over gRPC.

        Gets a single document.

        Returns:
            Callable[[~.GetDocumentRequest],
                    ~.Document]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_document' not in self._stubs:
            self._stubs['get_document'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/GetDocument',
                request_serializer=firestore.GetDocumentRequest.serialize,
                response_deserializer=document.Document.deserialize,
            )
        return self._stubs['get_document']

    @property
    def list_documents(self) -> Callable[
            [firestore.ListDocumentsRequest],
            firestore.ListDocumentsResponse]:
        r"""Return a callable for the list documents method over gRPC.

        Lists documents.

        Returns:
            Callable[[~.ListDocumentsRequest],
                    ~.ListDocumentsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_documents' not in self._stubs:
            self._stubs['list_documents'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/ListDocuments',
                request_serializer=firestore.ListDocumentsRequest.serialize,
                response_deserializer=firestore.ListDocumentsResponse.deserialize,
            )
        return self._stubs['list_documents']

    @property
    def update_document(self) -> Callable[
            [firestore.UpdateDocumentRequest],
            gf_document.Document]:
        r"""Return a callable for the update document method over gRPC.

        Updates or inserts a document.

        Returns:
            Callable[[~.UpdateDocumentRequest],
                    ~.Document]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_document' not in self._stubs:
            self._stubs['update_document'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/UpdateDocument',
                request_serializer=firestore.UpdateDocumentRequest.serialize,
                response_deserializer=gf_document.Document.deserialize,
            )
        return self._stubs['update_document']

    @property
    def delete_document(self) -> Callable[
            [firestore.DeleteDocumentRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete document method over gRPC.

        Deletes a document.

        Returns:
            Callable[[~.DeleteDocumentRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_document' not in self._stubs:
            self._stubs['delete_document'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/DeleteDocument',
                request_serializer=firestore.DeleteDocumentRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_document']

    @property
    def batch_get_documents(self) -> Callable[
            [firestore.BatchGetDocumentsRequest],
            firestore.BatchGetDocumentsResponse]:
        r"""Return a callable for the batch get documents method over gRPC.

        Gets multiple documents.

        Documents returned by this method are not guaranteed to
        be returned in the same order that they were requested.

        Returns:
            Callable[[~.BatchGetDocumentsRequest],
                    ~.BatchGetDocumentsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'batch_get_documents' not in self._stubs:
            # ``unary_stream``: the server streams the responses back.
            self._stubs['batch_get_documents'] = self.grpc_channel.unary_stream(
                '/google.firestore.v1.Firestore/BatchGetDocuments',
                request_serializer=firestore.BatchGetDocumentsRequest.serialize,
                response_deserializer=firestore.BatchGetDocumentsResponse.deserialize,
            )
        return self._stubs['batch_get_documents']

    @property
    def begin_transaction(self) -> Callable[
            [firestore.BeginTransactionRequest],
            firestore.BeginTransactionResponse]:
        r"""Return a callable for the begin transaction method over gRPC.

        Starts a new transaction.

        Returns:
            Callable[[~.BeginTransactionRequest],
                    ~.BeginTransactionResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'begin_transaction' not in self._stubs:
            self._stubs['begin_transaction'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/BeginTransaction',
                request_serializer=firestore.BeginTransactionRequest.serialize,
                response_deserializer=firestore.BeginTransactionResponse.deserialize,
            )
        return self._stubs['begin_transaction']

    @property
    def commit(self) -> Callable[
            [firestore.CommitRequest],
            firestore.CommitResponse]:
        r"""Return a callable for the commit method over gRPC.

        Commits a transaction, while optionally updating
        documents.

        Returns:
            Callable[[~.CommitRequest],
                    ~.CommitResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'commit' not in self._stubs:
            self._stubs['commit'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/Commit',
                request_serializer=firestore.CommitRequest.serialize,
                response_deserializer=firestore.CommitResponse.deserialize,
            )
        return self._stubs['commit']

    @property
    def rollback(self) -> Callable[
            [firestore.RollbackRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the rollback method over gRPC.

        Rolls back a transaction.

        Returns:
            Callable[[~.RollbackRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'rollback' not in self._stubs:
            self._stubs['rollback'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/Rollback',
                request_serializer=firestore.RollbackRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['rollback']

    @property
    def run_query(self) -> Callable[
            [firestore.RunQueryRequest],
            firestore.RunQueryResponse]:
        r"""Return a callable for the run query method over gRPC.

        Runs a query.

        Returns:
            Callable[[~.RunQueryRequest],
                    ~.RunQueryResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'run_query' not in self._stubs:
            # ``unary_stream``: query results are streamed back by the server.
            self._stubs['run_query'] = self.grpc_channel.unary_stream(
                '/google.firestore.v1.Firestore/RunQuery',
                request_serializer=firestore.RunQueryRequest.serialize,
                response_deserializer=firestore.RunQueryResponse.deserialize,
            )
        return self._stubs['run_query']

    @property
    def run_aggregation_query(self) -> Callable[
            [firestore.RunAggregationQueryRequest],
            firestore.RunAggregationQueryResponse]:
        r"""Return a callable for the run aggregation query method over gRPC.

        Runs an aggregation query.

        Rather than producing [Document][google.firestore.v1.Document]
        results like
        [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery],
        this API allows running an aggregation to produce a series of
        [AggregationResult][google.firestore.v1.AggregationResult]
        server-side.

        High-Level Example:

        ::

           -- Return the number of documents in table given a filter.
           SELECT COUNT(*) FROM ( SELECT * FROM k where a = true );

        Returns:
            Callable[[~.RunAggregationQueryRequest],
                    ~.RunAggregationQueryResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'run_aggregation_query' not in self._stubs:
            # ``unary_stream``: aggregation results are streamed back.
            self._stubs['run_aggregation_query'] = self.grpc_channel.unary_stream(
                '/google.firestore.v1.Firestore/RunAggregationQuery',
                request_serializer=firestore.RunAggregationQueryRequest.serialize,
                response_deserializer=firestore.RunAggregationQueryResponse.deserialize,
            )
        return self._stubs['run_aggregation_query']

    @property
    def partition_query(self) -> Callable[
            [firestore.PartitionQueryRequest],
            firestore.PartitionQueryResponse]:
        r"""Return a callable for the partition query method over gRPC.

        Partitions a query by returning partition cursors
        that can be used to run the query in parallel. The
        returned partition cursors are split points that can be
        used by RunQuery as starting/end points for the query
        results.

        Returns:
            Callable[[~.PartitionQueryRequest],
                    ~.PartitionQueryResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'partition_query' not in self._stubs:
            self._stubs['partition_query'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/PartitionQuery',
                request_serializer=firestore.PartitionQueryRequest.serialize,
                response_deserializer=firestore.PartitionQueryResponse.deserialize,
            )
        return self._stubs['partition_query']

    @property
    def write(self) -> Callable[
            [firestore.WriteRequest],
            firestore.WriteResponse]:
        r"""Return a callable for the write method over gRPC.

        Streams batches of document updates and deletes, in
        order. This method is only available via gRPC or
        WebChannel (not REST).

        Returns:
            Callable[[~.WriteRequest],
                    ~.WriteResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'write' not in self._stubs:
            # ``stream_stream``: bidirectional streaming RPC.
            self._stubs['write'] = self.grpc_channel.stream_stream(
                '/google.firestore.v1.Firestore/Write',
                request_serializer=firestore.WriteRequest.serialize,
                response_deserializer=firestore.WriteResponse.deserialize,
            )
        return self._stubs['write']

    @property
    def listen(self) -> Callable[
            [firestore.ListenRequest],
            firestore.ListenResponse]:
        r"""Return a callable for the listen method over gRPC.

        Listens to changes. This method is only available via
        gRPC or WebChannel (not REST).

        Returns:
            Callable[[~.ListenRequest],
                    ~.ListenResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'listen' not in self._stubs:
            # ``stream_stream``: bidirectional streaming RPC.
            self._stubs['listen'] = self.grpc_channel.stream_stream(
                '/google.firestore.v1.Firestore/Listen',
                request_serializer=firestore.ListenRequest.serialize,
                response_deserializer=firestore.ListenResponse.deserialize,
            )
        return self._stubs['listen']

    @property
    def list_collection_ids(self) -> Callable[
            [firestore.ListCollectionIdsRequest],
            firestore.ListCollectionIdsResponse]:
        r"""Return a callable for the list collection ids method over gRPC.

        Lists all the collection IDs underneath a document.

        Returns:
            Callable[[~.ListCollectionIdsRequest],
                    ~.ListCollectionIdsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_collection_ids' not in self._stubs:
            self._stubs['list_collection_ids'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/ListCollectionIds',
                request_serializer=firestore.ListCollectionIdsRequest.serialize,
                response_deserializer=firestore.ListCollectionIdsResponse.deserialize,
            )
        return self._stubs['list_collection_ids']

    @property
    def batch_write(self) -> Callable[
            [firestore.BatchWriteRequest],
            firestore.BatchWriteResponse]:
        r"""Return a callable for the batch write method over gRPC.

        Applies a batch of write operations.

        The BatchWrite method does not apply the write operations
        atomically and can apply them out of order. Method does not
        allow more than one write per document. Each write succeeds or
        fails independently. See the
        [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
        the success status of each write.

        If you require an atomically applied set of writes, use
        [Commit][google.firestore.v1.Firestore.Commit] instead.

        Returns:
            Callable[[~.BatchWriteRequest],
                    ~.BatchWriteResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'batch_write' not in self._stubs:
            self._stubs['batch_write'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/BatchWrite',
                request_serializer=firestore.BatchWriteRequest.serialize,
                response_deserializer=firestore.BatchWriteResponse.deserialize,
            )
        return self._stubs['batch_write']

    @property
    def create_document(self) -> Callable[
            [firestore.CreateDocumentRequest],
            document.Document]:
        r"""Return a callable for the create document method over gRPC.

        Creates a new document.

        Returns:
            Callable[[~.CreateDocumentRequest],
                    ~.Document]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_document' not in self._stubs:
            self._stubs['create_document'] = self.grpc_channel.unary_unary(
                '/google.firestore.v1.Firestore/CreateDocument',
                request_serializer=firestore.CreateDocumentRequest.serialize,
                response_deserializer=document.Document.deserialize,
            )
        return self._stubs['create_document']

    def close(self):
        """Close the transport's underlying gRPC channel."""
        self.grpc_channel.close()

    @property
    def delete_operation(
        self,
    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
        r"""Return a callable for the delete_operation method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_operation" not in self._stubs:
            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/DeleteOperation",
                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
                # No deserializer: the response payload is not deserialized
                # (note the ``None`` in the property's return annotation).
                response_deserializer=None,
            )
        return self._stubs["delete_operation"]

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                # No deserializer: the response payload is not deserialized
                # (note the ``None`` in the property's return annotation).
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
        r"""Return a callable for the list_operations method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def kind(self) -> str:
        # Transport discriminator used by the client factory.
        return "grpc"


__all__ = (
    'FirestoreGrpcTransport',
)
diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
new file mode 100644
index 0000000000..64cef2eba2
--- /dev/null
+++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
@@ -0,0 +1,773 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import document as gf_document
from google.cloud.firestore_v1.types import firestore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from .base import FirestoreTransport, DEFAULT_CLIENT_INFO
# NOTE(review): FirestoreGrpcTransport appears unused in this module's
# visible code; presumably kept for type references elsewhere — confirm
# before removing.
from .grpc import FirestoreGrpcTransport


class FirestoreGrpcAsyncIOTransport(FirestoreTransport):
    """gRPC AsyncIO backend transport for Firestore.

    The Cloud Firestore service.

    Cloud Firestore is a fast, fully managed, serverless,
    cloud-native NoSQL document database that simplifies storing,
    syncing, and querying data for your mobile, web, and IoT apps at
    global scale. Its client libraries provide live synchronization
    and offline support, while its security features and
    integrations with Firebase and Google Cloud Platform accelerate
    building truly serverless apps.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Channel and lazy stub cache.
    # NOTE(review): the class-level ``{}`` is a mutable class attribute; it
    # is shadowed by the per-instance dict assigned in ``__init__``.
    _grpc_channel: aio.Channel
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(cls,
            host: str = 'firestore.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            **kwargs) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    def __init__(self, *,
            host: str = 'firestore.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: Optional[aio.Channel] = None,
            api_mtls_endpoint: Optional[str] = None,
            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'firestore.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Optional audience value; forwarded
                unchanged to the base ``FirestoreTransport``.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # NOTE(review): ``False`` is a sentinel (not a Credentials
            # instance) presumably understood by the base transport to skip
            # credential resolution — confirm against FirestoreTransport.__init__.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
+ return self._grpc_channel + + @property + def get_document(self) -> Callable[ + [firestore.GetDocumentRequest], + Awaitable[document.Document]]: + r"""Return a callable for the get document method over gRPC. + + Gets a single document. + + Returns: + Callable[[~.GetDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_document' not in self._stubs: + self._stubs['get_document'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/GetDocument', + request_serializer=firestore.GetDocumentRequest.serialize, + response_deserializer=document.Document.deserialize, + ) + return self._stubs['get_document'] + + @property + def list_documents(self) -> Callable[ + [firestore.ListDocumentsRequest], + Awaitable[firestore.ListDocumentsResponse]]: + r"""Return a callable for the list documents method over gRPC. + + Lists documents. + + Returns: + Callable[[~.ListDocumentsRequest], + Awaitable[~.ListDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_documents' not in self._stubs: + self._stubs['list_documents'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/ListDocuments', + request_serializer=firestore.ListDocumentsRequest.serialize, + response_deserializer=firestore.ListDocumentsResponse.deserialize, + ) + return self._stubs['list_documents'] + + @property + def update_document(self) -> Callable[ + [firestore.UpdateDocumentRequest], + Awaitable[gf_document.Document]]: + r"""Return a callable for the update document method over gRPC. + + Updates or inserts a document. + + Returns: + Callable[[~.UpdateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_document' not in self._stubs: + self._stubs['update_document'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/UpdateDocument', + request_serializer=firestore.UpdateDocumentRequest.serialize, + response_deserializer=gf_document.Document.deserialize, + ) + return self._stubs['update_document'] + + @property + def delete_document(self) -> Callable[ + [firestore.DeleteDocumentRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a document. + + Returns: + Callable[[~.DeleteDocumentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_document' not in self._stubs: + self._stubs['delete_document'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/DeleteDocument', + request_serializer=firestore.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_document'] + + @property + def batch_get_documents(self) -> Callable[ + [firestore.BatchGetDocumentsRequest], + Awaitable[firestore.BatchGetDocumentsResponse]]: + r"""Return a callable for the batch get documents method over gRPC. + + Gets multiple documents. + + Documents returned by this method are not guaranteed to + be returned in the same order that they were requested. + + Returns: + Callable[[~.BatchGetDocumentsRequest], + Awaitable[~.BatchGetDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_documents' not in self._stubs: + self._stubs['batch_get_documents'] = self.grpc_channel.unary_stream( + '/google.firestore.v1.Firestore/BatchGetDocuments', + request_serializer=firestore.BatchGetDocumentsRequest.serialize, + response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, + ) + return self._stubs['batch_get_documents'] + + @property + def begin_transaction(self) -> Callable[ + [firestore.BeginTransactionRequest], + Awaitable[firestore.BeginTransactionResponse]]: + r"""Return a callable for the begin transaction method over gRPC. + + Starts a new transaction. + + Returns: + Callable[[~.BeginTransactionRequest], + Awaitable[~.BeginTransactionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/BeginTransaction', + request_serializer=firestore.BeginTransactionRequest.serialize, + response_deserializer=firestore.BeginTransactionResponse.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [firestore.CommitRequest], + Awaitable[firestore.CommitResponse]]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction, while optionally updating + documents. + + Returns: + Callable[[~.CommitRequest], + Awaitable[~.CommitResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit' not in self._stubs: + self._stubs['commit'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/Commit', + request_serializer=firestore.CommitRequest.serialize, + response_deserializer=firestore.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [firestore.RollbackRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction. + + Returns: + Callable[[~.RollbackRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'rollback' not in self._stubs: + self._stubs['rollback'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/Rollback', + request_serializer=firestore.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def run_query(self) -> Callable[ + [firestore.RunQueryRequest], + Awaitable[firestore.RunQueryResponse]]: + r"""Return a callable for the run query method over gRPC. + + Runs a query. + + Returns: + Callable[[~.RunQueryRequest], + Awaitable[~.RunQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_query' not in self._stubs: + self._stubs['run_query'] = self.grpc_channel.unary_stream( + '/google.firestore.v1.Firestore/RunQuery', + request_serializer=firestore.RunQueryRequest.serialize, + response_deserializer=firestore.RunQueryResponse.deserialize, + ) + return self._stubs['run_query'] + + @property + def run_aggregation_query(self) -> Callable[ + [firestore.RunAggregationQueryRequest], + Awaitable[firestore.RunAggregationQueryResponse]]: + r"""Return a callable for the run aggregation query method over gRPC. + + Runs an aggregation query. + + Rather than producing [Document][google.firestore.v1.Document] + results like + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], + this API allows running an aggregation to produce a series of + [AggregationResult][google.firestore.v1.AggregationResult] + server-side. + + High-Level Example: + + :: + + -- Return the number of documents in table given a filter. 
+ SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); + + Returns: + Callable[[~.RunAggregationQueryRequest], + Awaitable[~.RunAggregationQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_aggregation_query' not in self._stubs: + self._stubs['run_aggregation_query'] = self.grpc_channel.unary_stream( + '/google.firestore.v1.Firestore/RunAggregationQuery', + request_serializer=firestore.RunAggregationQueryRequest.serialize, + response_deserializer=firestore.RunAggregationQueryResponse.deserialize, + ) + return self._stubs['run_aggregation_query'] + + @property + def partition_query(self) -> Callable[ + [firestore.PartitionQueryRequest], + Awaitable[firestore.PartitionQueryResponse]]: + r"""Return a callable for the partition query method over gRPC. + + Partitions a query by returning partition cursors + that can be used to run the query in parallel. The + returned partition cursors are split points that can be + used by RunQuery as starting/end points for the query + results. + + Returns: + Callable[[~.PartitionQueryRequest], + Awaitable[~.PartitionQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/PartitionQuery', + request_serializer=firestore.PartitionQueryRequest.serialize, + response_deserializer=firestore.PartitionQueryResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def write(self) -> Callable[ + [firestore.WriteRequest], + Awaitable[firestore.WriteResponse]]: + r"""Return a callable for the write method over gRPC. + + Streams batches of document updates and deletes, in + order. This method is only available via gRPC or + WebChannel (not REST). + + Returns: + Callable[[~.WriteRequest], + Awaitable[~.WriteResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write' not in self._stubs: + self._stubs['write'] = self.grpc_channel.stream_stream( + '/google.firestore.v1.Firestore/Write', + request_serializer=firestore.WriteRequest.serialize, + response_deserializer=firestore.WriteResponse.deserialize, + ) + return self._stubs['write'] + + @property + def listen(self) -> Callable[ + [firestore.ListenRequest], + Awaitable[firestore.ListenResponse]]: + r"""Return a callable for the listen method over gRPC. + + Listens to changes. This method is only available via + gRPC or WebChannel (not REST). + + Returns: + Callable[[~.ListenRequest], + Awaitable[~.ListenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'listen' not in self._stubs: + self._stubs['listen'] = self.grpc_channel.stream_stream( + '/google.firestore.v1.Firestore/Listen', + request_serializer=firestore.ListenRequest.serialize, + response_deserializer=firestore.ListenResponse.deserialize, + ) + return self._stubs['listen'] + + @property + def list_collection_ids(self) -> Callable[ + [firestore.ListCollectionIdsRequest], + Awaitable[firestore.ListCollectionIdsResponse]]: + r"""Return a callable for the list collection ids method over gRPC. + + Lists all the collection IDs underneath a document. + + Returns: + Callable[[~.ListCollectionIdsRequest], + Awaitable[~.ListCollectionIdsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_collection_ids' not in self._stubs: + self._stubs['list_collection_ids'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/ListCollectionIds', + request_serializer=firestore.ListCollectionIdsRequest.serialize, + response_deserializer=firestore.ListCollectionIdsResponse.deserialize, + ) + return self._stubs['list_collection_ids'] + + @property + def batch_write(self) -> Callable[ + [firestore.BatchWriteRequest], + Awaitable[firestore.BatchWriteResponse]]: + r"""Return a callable for the batch write method over gRPC. + + Applies a batch of write operations. + + The BatchWrite method does not apply the write operations + atomically and can apply them out of order. Method does not + allow more than one write per document. Each write succeeds or + fails independently. See the + [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for + the success status of each write. + + If you require an atomically applied set of writes, use + [Commit][google.firestore.v1.Firestore.Commit] instead. 
+ + Returns: + Callable[[~.BatchWriteRequest], + Awaitable[~.BatchWriteResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_write' not in self._stubs: + self._stubs['batch_write'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/BatchWrite', + request_serializer=firestore.BatchWriteRequest.serialize, + response_deserializer=firestore.BatchWriteResponse.deserialize, + ) + return self._stubs['batch_write'] + + @property + def create_document(self) -> Callable[ + [firestore.CreateDocumentRequest], + Awaitable[document.Document]]: + r"""Return a callable for the create document method over gRPC. + + Creates a new document. + + Returns: + Callable[[~.CreateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_document' not in self._stubs: + self._stubs['create_document'] = self.grpc_channel.unary_unary( + '/google.firestore.v1.Firestore/CreateDocument', + request_serializer=firestore.CreateDocumentRequest.serialize, + response_deserializer=document.Document.deserialize, + ) + return self._stubs['create_document'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'FirestoreGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py new file mode 100644 index 0000000000..55351bcc73 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -0,0 +1,2188 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FirestoreRestInterceptor: + """Interceptor for Firestore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FirestoreRestTransport. + + .. code-block:: python + class MyCustomFirestoreInterceptor(FirestoreRestInterceptor): + def pre_batch_get_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_write(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_write(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_begin_transaction(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_begin_transaction(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_commit(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_commit(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_collection_ids(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_list_collection_ids(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_partition_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_partition_query(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rollback(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_run_aggregation_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_aggregation_query(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_query(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_document(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FirestoreRestTransport(interceptor=MyCustomFirestoreInterceptor()) + client = FirestoreClient(transport=transport) + + + """ + def pre_batch_get_documents(self, request: firestore.BatchGetDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BatchGetDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_get_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. 
+ """ + return request, metadata + + def post_batch_get_documents(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for batch_get_documents + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_batch_write(self, request: firestore.BatchWriteRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BatchWriteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_write + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_batch_write(self, response: firestore.BatchWriteResponse) -> firestore.BatchWriteResponse: + """Post-rpc interceptor for batch_write + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_begin_transaction(self, request: firestore.BeginTransactionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_begin_transaction(self, response: firestore.BeginTransactionResponse) -> firestore.BeginTransactionResponse: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. 
+ """ + return response + def pre_commit(self, request: firestore.CommitRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.CommitRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for commit + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_commit(self, response: firestore.CommitResponse) -> firestore.CommitResponse: + """Post-rpc interceptor for commit + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_create_document(self, request: firestore.CreateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.CreateDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_create_document(self, response: document.Document) -> document.Document: + """Post-rpc interceptor for create_document + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_delete_document(self, request: firestore.DeleteDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.DeleteDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. 
+ """ + return request, metadata + + def pre_get_document(self, request: firestore.GetDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.GetDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_get_document(self, response: document.Document) -> document.Document: + """Post-rpc interceptor for get_document + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_list_collection_ids(self, request: firestore.ListCollectionIdsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.ListCollectionIdsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_collection_ids + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_list_collection_ids(self, response: firestore.ListCollectionIdsResponse) -> firestore.ListCollectionIdsResponse: + """Post-rpc interceptor for list_collection_ids + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_list_documents(self, request: firestore.ListDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.ListDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. 
+ """ + return request, metadata + + def post_list_documents(self, response: firestore.ListDocumentsResponse) -> firestore.ListDocumentsResponse: + """Post-rpc interceptor for list_documents + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_partition_query(self, request: firestore.PartitionQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.PartitionQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for partition_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_partition_query(self, response: firestore.PartitionQueryResponse) -> firestore.PartitionQueryResponse: + """Post-rpc interceptor for partition_query + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_rollback(self, request: firestore.RollbackRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RollbackRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rollback + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def pre_run_aggregation_query(self, request: firestore.RunAggregationQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_aggregation_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. 
+ """ + return request, metadata + + def post_run_aggregation_query(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for run_aggregation_query + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_run_query(self, request: firestore.RunQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RunQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_run_query(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for run_query + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_update_document(self, request: firestore.UpdateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.UpdateDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_update_document(self, response: gf_document.Document) -> gf_document.Document: + """Post-rpc interceptor for update_document + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FirestoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FirestoreRestInterceptor + + +class FirestoreRestTransport(FirestoreTransport): + """REST backend transport for Firestore. + + The Cloud Firestore service. + + Cloud Firestore is a fast, fully managed, serverless, + cloud-native NoSQL document database that simplifies storing, + syncing, and querying data for your mobile, web, and IoT apps at + global scale. Its client libraries provide live synchronization + and offline support, while its security features and + integrations with Firebase and Google Cloud Platform accelerate + building truly serverless apps. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[FirestoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+        always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+            be used for service account credentials.
+        url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or FirestoreRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _BatchGetDocuments(FirestoreRestStub):
+        def __hash__(self):
+            return hash("BatchGetDocuments")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: firestore.BatchGetDocumentsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> rest_streaming.ResponseIterator:
+            r"""Call the batch get documents method over HTTP.
+
+            Args:
+                request (~.firestore.BatchGetDocumentsRequest):
+                    The request object.
The request for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.BatchGetDocumentsResponse: + The streamed response for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/databases/*}/documents:batchGet', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents(request, metadata) + pb_request = firestore.BatchGetDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, firestore.BatchGetDocumentsResponse) + resp = self._interceptor.post_batch_get_documents(resp) + return resp + + class _BatchWrite(FirestoreRestStub): + def __hash__(self): + return hash("BatchWrite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.BatchWriteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.BatchWriteResponse: + r"""Call the batch write method over HTTP. + + Args: + request (~.firestore.BatchWriteRequest): + The request object. The request for + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.BatchWriteResponse: + The response from + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/databases/*}/documents:batchWrite', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_batch_write(request, metadata) + pb_request = firestore.BatchWriteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.BatchWriteResponse() + pb_resp = firestore.BatchWriteResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_write(resp) + return resp + + class _BeginTransaction(FirestoreRestStub): + def __hash__(self): + return hash("BeginTransaction") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.BeginTransactionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.BeginTransactionResponse: + r"""Call the begin transaction method over HTTP. + + Args: + request (~.firestore.BeginTransactionRequest): + The request object. The request for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.BeginTransactionResponse: + The response for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/databases/*}/documents:beginTransaction', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_begin_transaction(request, metadata) + pb_request = firestore.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.BeginTransactionResponse() + pb_resp = firestore.BeginTransactionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_begin_transaction(resp) + return resp + + class _Commit(FirestoreRestStub): + def __hash__(self): + return hash("Commit") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.CommitRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.CommitResponse: + r"""Call the commit method over HTTP. + + Args: + request (~.firestore.CommitRequest): + The request object. The request for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.CommitResponse: + The response for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/databases/*}/documents:commit', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_commit(request, metadata) + pb_request = firestore.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.CommitResponse() + pb_resp = firestore.CommitResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit(resp) + return resp + + class _CreateDocument(FirestoreRestStub): + def __hash__(self): + return hash("CreateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.CreateDocumentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> document.Document: + r"""Call the create document method over HTTP. + + Args: + request (~.firestore.CreateDocumentRequest): + The request object. The request for + [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}', + 'body': 'document', + }, + ] + request, metadata = self._interceptor.pre_create_document(request, metadata) + pb_request = firestore.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document.Document() + pb_resp = document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document(resp) + return resp + + class _DeleteDocument(FirestoreRestStub): + def __hash__(self): + return hash("DeleteDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.DeleteDocumentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete document method over HTTP. + + Args: + request (~.firestore.DeleteDocumentRequest): + The request object. The request for + [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/databases/*/documents/*/**}', + }, + ] + request, metadata = self._interceptor.pre_delete_document(request, metadata) + pb_request = firestore.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDocument(FirestoreRestStub): + def __hash__(self): + return hash("GetDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.GetDocumentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> document.Document: + r"""Call the get document method over HTTP. + + Args: + request (~.firestore.GetDocumentRequest): + The request object. The request for + [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*/documents/*/**}', + }, + ] + request, metadata = self._interceptor.pre_get_document(request, metadata) + pb_request = firestore.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document.Document() + pb_resp = document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_document(resp) + return resp + + class _ListCollectionIds(FirestoreRestStub): + def __hash__(self): + return hash("ListCollectionIds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.ListCollectionIdsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.ListCollectionIdsResponse: + r"""Call the list collection ids method over HTTP. + + Args: + request (~.firestore.ListCollectionIdsRequest): + The request object. The request for + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.ListCollectionIdsResponse: + The response from + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents}:listCollectionIds', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_list_collection_ids(request, metadata) + pb_request = firestore.ListCollectionIdsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.ListCollectionIdsResponse() + pb_resp = firestore.ListCollectionIdsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_collection_ids(resp) + return resp + + class _ListDocuments(FirestoreRestStub): + def __hash__(self): + return hash("ListDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.ListDocumentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.ListDocumentsResponse: + r"""Call the list documents method over HTTP. + + Args: + request (~.firestore.ListDocumentsRequest): + The request object. The request for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.ListDocumentsResponse: + The response for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}', + }, +{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/databases/*/documents}/{collection_id}', + }, + ] + request, metadata = self._interceptor.pre_list_documents(request, metadata) + pb_request = firestore.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.ListDocumentsResponse() + pb_resp = firestore.ListDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_documents(resp) + return resp + + class _Listen(FirestoreRestStub): + def __hash__(self): + return hash("Listen") + + def __call__(self, + request: firestore.ListenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method Listen is not available over REST transport" + ) + class _PartitionQuery(FirestoreRestStub): + def __hash__(self): + return hash("PartitionQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.PartitionQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore.PartitionQueryResponse: + r"""Call the partition query method over HTTP. + + Args: + request (~.firestore.PartitionQueryRequest): + The request object. The request for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.PartitionQueryResponse: + The response for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents}:partitionQuery', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:partitionQuery', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_partition_query(request, metadata) + pb_request = firestore.PartitionQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore.PartitionQueryResponse() + pb_resp = firestore.PartitionQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_partition_query(resp) + return resp + + class _Rollback(FirestoreRestStub): + def __hash__(self): + return hash("Rollback") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.RollbackRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the rollback method over HTTP. + + Args: + request (~.firestore.RollbackRequest): + The request object. The request for + [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/databases/*}/documents:rollback', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_rollback(request, metadata) + pb_request = firestore.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _RunAggregationQuery(FirestoreRestStub): + def __hash__(self): + return hash("RunAggregationQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.RunAggregationQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the run aggregation query method over HTTP. 
+ + Args: + request (~.firestore.RunAggregationQueryRequest): + The request object. The request for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.RunAggregationQueryResponse: + The response for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents}:runAggregationQuery', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:runAggregationQuery', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_run_aggregation_query(request, metadata) + pb_request = firestore.RunAggregationQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, firestore.RunAggregationQueryResponse) + resp = self._interceptor.post_run_aggregation_query(resp) + return resp + + class _RunQuery(FirestoreRestStub): + def __hash__(self): + return hash("RunQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.RunQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the run query method over HTTP. + + Args: + request (~.firestore.RunQueryRequest): + The request object. The request for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore.RunQueryResponse: + The response for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents}:runQuery', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_run_query(request, metadata) + pb_request = firestore.RunQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, firestore.RunQueryResponse) + resp = self._interceptor.post_run_query(resp) + return resp + + class _UpdateDocument(FirestoreRestStub): + def __hash__(self): + return hash("UpdateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore.UpdateDocumentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> gf_document.Document: + r"""Call the update document method over HTTP. + + Args: + request (~.firestore.UpdateDocumentRequest): + The request object. The request for + [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gf_document.Document: + A Firestore document. + + Must not exceed 1 MiB - 4 bytes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{document.name=projects/*/databases/*/documents/*/**}', + 'body': 'document', + }, + ] + request, metadata = self._interceptor.pre_update_document(request, metadata) + pb_request = firestore.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gf_document.Document() + pb_resp = gf_document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document(resp) + return resp + + class _Write(FirestoreRestStub): + def __hash__(self): + return hash("Write") + + def __call__(self, + request: firestore.WriteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method Write is not available over REST transport" + ) + + @property + def batch_get_documents(self) -> Callable[ + [firestore.BatchGetDocumentsRequest], + firestore.BatchGetDocumentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_write(self) -> Callable[ + [firestore.BatchWriteRequest], + firestore.BatchWriteResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore + + @property + def begin_transaction(self) -> Callable[ + [firestore.BeginTransactionRequest], + firestore.BeginTransactionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore + + @property + def commit(self) -> Callable[ + [firestore.CommitRequest], + firestore.CommitResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Commit(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_document(self) -> Callable[ + [firestore.CreateDocumentRequest], + document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document(self) -> Callable[ + [firestore.DeleteDocumentRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document(self) -> Callable[ + [firestore.GetDocumentRequest], + document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_collection_ids(self) -> Callable[ + [firestore.ListCollectionIdsRequest], + firestore.ListCollectionIdsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListCollectionIds(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_documents(self) -> Callable[ + [firestore.ListDocumentsRequest], + firestore.ListDocumentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def listen(self) -> Callable[ + [firestore.ListenRequest], + firestore.ListenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Listen(self._session, self._host, self._interceptor) # type: ignore + + @property + def partition_query(self) -> Callable[ + [firestore.PartitionQueryRequest], + firestore.PartitionQueryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback(self) -> Callable[ + [firestore.RollbackRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Rollback(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_aggregation_query(self) -> Callable[ + [firestore.RunAggregationQueryRequest], + firestore.RunAggregationQueryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunAggregationQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_query(self) -> Callable[ + [firestore.RunQueryRequest], + firestore.RunQueryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document(self) -> Callable[ + [firestore.UpdateDocumentRequest], + gf_document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def write(self) -> Callable[ + [firestore.WriteRequest], + firestore.WriteResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Write(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(FirestoreRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(FirestoreRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(FirestoreRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(FirestoreRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'FirestoreRestTransport', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py new file mode 100644 index 0000000000..772ccce027 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .aggregation_result import ( + AggregationResult, +) +from .bloom_filter import ( + BitSequence, + BloomFilter, +) +from .common import ( + DocumentMask, + Precondition, + TransactionOptions, +) +from .document import ( + ArrayValue, + Document, + MapValue, + Value, +) +from .firestore import ( + BatchGetDocumentsRequest, + BatchGetDocumentsResponse, + BatchWriteRequest, + BatchWriteResponse, + BeginTransactionRequest, + BeginTransactionResponse, + CommitRequest, + CommitResponse, + CreateDocumentRequest, + DeleteDocumentRequest, + GetDocumentRequest, + ListCollectionIdsRequest, + ListCollectionIdsResponse, + ListDocumentsRequest, + ListDocumentsResponse, + ListenRequest, + ListenResponse, + PartitionQueryRequest, + PartitionQueryResponse, + RollbackRequest, + RunAggregationQueryRequest, + RunAggregationQueryResponse, + RunQueryRequest, + RunQueryResponse, + Target, + TargetChange, + UpdateDocumentRequest, + WriteRequest, + WriteResponse, +) +from .query import ( + Cursor, + StructuredAggregationQuery, + StructuredQuery, +) +from .query_profile import ( + ExecutionStats, + ExplainMetrics, + ExplainOptions, + PlanSummary, +) +from .write import ( + DocumentChange, + DocumentDelete, + DocumentRemove, + DocumentTransform, + ExistenceFilter, + Write, + WriteResult, +) + +__all__ = ( + 'AggregationResult', + 'BitSequence', + 'BloomFilter', + 'DocumentMask', + 'Precondition', + 'TransactionOptions', + 'ArrayValue', + 'Document', + 'MapValue', + 'Value', + 'BatchGetDocumentsRequest', + 'BatchGetDocumentsResponse', + 'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'BeginTransactionResponse', + 'CommitRequest', + 'CommitResponse', + 'CreateDocumentRequest', + 'DeleteDocumentRequest', + 'GetDocumentRequest', + 'ListCollectionIdsRequest', + 'ListCollectionIdsResponse', + 'ListDocumentsRequest', + 'ListDocumentsResponse', + 'ListenRequest', + 'ListenResponse', + 'PartitionQueryRequest', + 'PartitionQueryResponse', + 'RollbackRequest', + 
'RunAggregationQueryRequest', + 'RunAggregationQueryResponse', + 'RunQueryRequest', + 'RunQueryResponse', + 'Target', + 'TargetChange', + 'UpdateDocumentRequest', + 'WriteRequest', + 'WriteResponse', + 'Cursor', + 'StructuredAggregationQuery', + 'StructuredQuery', + 'ExecutionStats', + 'ExplainMetrics', + 'ExplainOptions', + 'PlanSummary', + 'DocumentChange', + 'DocumentDelete', + 'DocumentRemove', + 'DocumentTransform', + 'ExistenceFilter', + 'Write', + 'WriteResult', +) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py new file mode 100644 index 0000000000..fac23cb061 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_v1.types import document + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'AggregationResult', + }, +) + + +class AggregationResult(proto.Message): + r"""The result of a single bucket from a Firestore aggregation query. 
+ + The keys of ``aggregate_fields`` are the same for all results in an + aggregation query, unlike document queries which can have different + fields present for each result. + + Attributes: + aggregate_fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): + The result of the aggregation functions, ex: + ``COUNT(*) AS total_docs``. + + The key is the + [alias][google.firestore.v1.StructuredAggregationQuery.Aggregation.alias] + assigned to the aggregation function on input and the size + of this map equals the number of aggregation functions in + the query. + """ + + aggregate_fields: MutableMapping[str, document.Value] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=document.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py new file mode 100644 index 0000000000..592c1d6aa9 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'BitSequence', + 'BloomFilter', + }, +) + + +class BitSequence(proto.Message): + r"""A sequence of bits, encoded in a byte array. + + Each byte in the ``bitmap`` byte array stores 8 bits of the + sequence. The only exception is the last byte, which may store 8 *or + fewer* bits. The ``padding`` defines the number of bits of the last + byte to be ignored as "padding". The values of these "padding" bits + are unspecified and must be ignored. + + To retrieve the first bit, bit 0, calculate: + ``(bitmap[0] & 0x01) != 0``. To retrieve the second bit, bit 1, + calculate: ``(bitmap[0] & 0x02) != 0``. To retrieve the third bit, + bit 2, calculate: ``(bitmap[0] & 0x04) != 0``. To retrieve the + fourth bit, bit 3, calculate: ``(bitmap[0] & 0x08) != 0``. To + retrieve bit n, calculate: + ``(bitmap[n / 8] & (0x01 << (n % 8))) != 0``. + + The "size" of a ``BitSequence`` (the number of bits it contains) is + calculated by this formula: ``(bitmap.length * 8) - padding``. + + Attributes: + bitmap (bytes): + The bytes that encode the bit sequence. + May have a length of zero. + padding (int): + The number of bits of the last byte in ``bitmap`` to ignore + as "padding". If the length of ``bitmap`` is zero, then this + value must be ``0``. Otherwise, this value must be between 0 + and 7, inclusive. + """ + + bitmap: bytes = proto.Field( + proto.BYTES, + number=1, + ) + padding: int = proto.Field( + proto.INT32, + number=2, + ) + + +class BloomFilter(proto.Message): + r"""A bloom filter (https://en.wikipedia.org/wiki/Bloom_filter). + + The bloom filter hashes the entries with MD5 and treats the + resulting 128-bit hash as 2 distinct 64-bit hash values, interpreted + as unsigned integers using 2's complement encoding. 
+ + These two hash values, named ``h1`` and ``h2``, are then used to + compute the ``hash_count`` hash values using the formula, starting + at ``i=0``: + + :: + + h(i) = h1 + (i * h2) + + These resulting values are then taken modulo the number of bits in + the bloom filter to get the bits of the bloom filter to test for the + given entry. + + Attributes: + bits (google.cloud.firestore_v1.types.BitSequence): + The bloom filter data. + hash_count (int): + The number of hashes used by the algorithm. + """ + + bits: 'BitSequence' = proto.Field( + proto.MESSAGE, + number=1, + message='BitSequence', + ) + hash_count: int = proto.Field( + proto.INT32, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py new file mode 100644 index 0000000000..674bc6515e --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'DocumentMask', + 'Precondition', + 'TransactionOptions', + }, +) + + +class DocumentMask(proto.Message): + r"""A set of field paths on a document. Used to restrict a get or update + operation on a document to a subset of its fields. This is different + from standard field masks, as this is always scoped to a + [Document][google.firestore.v1.Document], and takes in account the + dynamic nature of [Value][google.firestore.v1.Value]. + + Attributes: + field_paths (MutableSequence[str]): + The list of field paths in the mask. See + [Document.fields][google.firestore.v1.Document.fields] for a + field path syntax reference. + """ + + field_paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class Precondition(proto.Message): + r"""A precondition on a document, used for conditional + operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + exists (bool): + When set to ``true``, the target document must exist. When + set to ``false``, the target document must not exist. + + This field is a member of `oneof`_ ``condition_type``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + When set, the target document must exist and + have been last updated at that time. Timestamp + must be microsecond aligned. + + This field is a member of `oneof`_ ``condition_type``. 
+ """ + + exists: bool = proto.Field( + proto.BOOL, + number=1, + oneof='condition_type', + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof='condition_type', + message=timestamp_pb2.Timestamp, + ) + + +class TransactionOptions(proto.Message): + r"""Options for creating a new transaction. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_only (google.cloud.firestore_v1.types.TransactionOptions.ReadOnly): + The transaction can only be used for read + operations. + + This field is a member of `oneof`_ ``mode``. + read_write (google.cloud.firestore_v1.types.TransactionOptions.ReadWrite): + The transaction can be used for both read and + write operations. + + This field is a member of `oneof`_ ``mode``. + """ + + class ReadWrite(proto.Message): + r"""Options for a transaction that can be used to read and write + documents. + Firestore does not allow 3rd party auth requests to create + read-write. transactions. + + Attributes: + retry_transaction (bytes): + An optional transaction to retry. + """ + + retry_transaction: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + class ReadOnly(proto.Message): + r"""Options for a transaction that can only be used to read + documents. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads documents at the given time. + + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. 
+ + This field is a member of `oneof`_ ``consistency_selector``. + """ + + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + + read_only: ReadOnly = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=ReadOnly, + ) + read_write: ReadWrite = proto.Field( + proto.MESSAGE, + number=3, + oneof='mode', + message=ReadWrite, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py new file mode 100644 index 0000000000..a60b6609f7 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'Document', + 'Value', + 'ArrayValue', + 'MapValue', + }, +) + + +class Document(proto.Message): + r"""A Firestore document. + + Must not exceed 1 MiB - 4 bytes. 
+ + Attributes: + name (str): + The resource name of the document, for example + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): + The document's fields. + + The map keys represent field names. + + Field names matching the regular expression ``__.*__`` are + reserved. Reserved field names are forbidden except in + certain documented contexts. The field names, represented as + UTF-8, must not exceed 1,500 bytes and cannot be empty. + + Field paths may be used in other contexts to refer to + structured fields defined here. For ``map_value``, the field + path is represented by a dot-delimited (``.``) string of + segments. Each segment is either a simple field name + (defined below) or a quoted field name. For example, the + structured field + ``"foo" : { map_value: { "x&y" : { string_value: "hello" }}}`` + would be represented by the field path + :literal:`foo.`x&y\``. + + A simple field name contains only characters ``a`` to ``z``, + ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start + with ``0`` to ``9``. For example, ``foo_bar_17``. + + A quoted field name starts and ends with :literal:`\`` and + may contain any character. Some characters, including + :literal:`\``, must be escaped using a ``\``. For example, + :literal:`\`x&y\`` represents ``x&y`` and + :literal:`\`bak\`tik\`` represents :literal:`bak`tik`. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the document was created. + + This value increases monotonically when a document is + deleted then recreated. It can also be compared to values + from other documents and the ``read_time`` of a query. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the document was last + changed. + + This value is initially set to the ``create_time`` then + increases monotonically with each change to the document. 
It + can also be compared to values from other documents and the + ``read_time`` of a query. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + fields: MutableMapping[str, 'Value'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message='Value', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Value(proto.Message): + r"""A message that can hold any of the supported value types. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + null_value (google.protobuf.struct_pb2.NullValue): + A null value. + + This field is a member of `oneof`_ ``value_type``. + boolean_value (bool): + A boolean value. + + This field is a member of `oneof`_ ``value_type``. + integer_value (int): + An integer value. + + This field is a member of `oneof`_ ``value_type``. + double_value (float): + A double value. + + This field is a member of `oneof`_ ``value_type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + A timestamp value. + + Precise only to microseconds. When stored, any + additional precision is rounded down. + + This field is a member of `oneof`_ ``value_type``. + string_value (str): + A string value. + + The string, represented as UTF-8, must not + exceed 1 MiB - 89 bytes. Only the first 1,500 + bytes of the UTF-8 representation are considered + by queries. + + This field is a member of `oneof`_ ``value_type``. + bytes_value (bytes): + A bytes value. + + Must not exceed 1 MiB - 89 bytes. 
+ Only the first 1,500 bytes are considered by + queries. + + This field is a member of `oneof`_ ``value_type``. + reference_value (str): + A reference to a document. For example: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This field is a member of `oneof`_ ``value_type``. + geo_point_value (google.type.latlng_pb2.LatLng): + A geo point value representing a point on the + surface of Earth. + + This field is a member of `oneof`_ ``value_type``. + array_value (google.cloud.firestore_v1.types.ArrayValue): + An array value. + + Cannot directly contain another array value, + though can contain an map which contains another + array. + + This field is a member of `oneof`_ ``value_type``. + map_value (google.cloud.firestore_v1.types.MapValue): + A map value. + + This field is a member of `oneof`_ ``value_type``. + """ + + null_value: struct_pb2.NullValue = proto.Field( + proto.ENUM, + number=11, + oneof='value_type', + enum=struct_pb2.NullValue, + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=1, + oneof='value_type', + ) + integer_value: int = proto.Field( + proto.INT64, + number=2, + oneof='value_type', + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof='value_type', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + oneof='value_type', + message=timestamp_pb2.Timestamp, + ) + string_value: str = proto.Field( + proto.STRING, + number=17, + oneof='value_type', + ) + bytes_value: bytes = proto.Field( + proto.BYTES, + number=18, + oneof='value_type', + ) + reference_value: str = proto.Field( + proto.STRING, + number=5, + oneof='value_type', + ) + geo_point_value: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=8, + oneof='value_type', + message=latlng_pb2.LatLng, + ) + array_value: 'ArrayValue' = proto.Field( + proto.MESSAGE, + number=9, + oneof='value_type', + message='ArrayValue', + ) + map_value: 'MapValue' = proto.Field( + proto.MESSAGE, 
+ number=6, + oneof='value_type', + message='MapValue', + ) + + +class ArrayValue(proto.Message): + r"""An array value. + + Attributes: + values (MutableSequence[google.cloud.firestore_v1.types.Value]): + Values in the array. + """ + + values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + +class MapValue(proto.Message): + r"""A map value. + + Attributes: + fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): + The map's fields. + + The map keys represent field names. Field names matching the + regular expression ``__.*__`` are reserved. Reserved field + names are forbidden except in certain documented contexts. + The map keys, represented as UTF-8, must not exceed 1,500 + bytes and cannot be empty. + """ + + fields: MutableMapping[str, 'Value'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message='Value', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py new file mode 100644 index 0000000000..898b12f7b4 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py @@ -0,0 +1,1758 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import query as gf_query +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'GetDocumentRequest', + 'ListDocumentsRequest', + 'ListDocumentsResponse', + 'CreateDocumentRequest', + 'UpdateDocumentRequest', + 'DeleteDocumentRequest', + 'BatchGetDocumentsRequest', + 'BatchGetDocumentsResponse', + 'BeginTransactionRequest', + 'BeginTransactionResponse', + 'CommitRequest', + 'CommitResponse', + 'RollbackRequest', + 'RunQueryRequest', + 'RunQueryResponse', + 'RunAggregationQueryRequest', + 'RunAggregationQueryResponse', + 'PartitionQueryRequest', + 'PartitionQueryResponse', + 'WriteRequest', + 'WriteResponse', + 'ListenRequest', + 'ListenResponse', + 'Target', + 'TargetChange', + 'ListCollectionIdsRequest', + 'ListCollectionIdsResponse', + 'BatchWriteRequest', + 'BatchWriteResponse', + }, +) + + +class GetDocumentRequest(proto.Message): + r"""The request for + [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the Document to get. 
In the + format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to return. If not set, returns all + fields. + If the document has a field that is not present + in this mask, that field will not be returned in + the response. + transaction (bytes): + Reads the document in a transaction. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads the version of the document at the + given time. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=2, + message=common.DocumentMask, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=3, + oneof='consistency_selector', + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + + +class ListDocumentsRequest(proto.Message): + r"""The request for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` + or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
+ + For example: + ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + collection_id (str): + Optional. The collection ID, relative to ``parent``, to + list. + + For example: ``chatrooms`` or ``messages``. + + This is optional, and when not provided, Firestore will list + documents from all collections under the provided + ``parent``. + page_size (int): + Optional. The maximum number of documents to + return in a single response. + Firestore may return fewer than this value. + page_token (str): + Optional. A page token, received from a previous + ``ListDocuments`` response. + + Provide this to retrieve the subsequent page. When + paginating, all other parameters (with the exception of + ``page_size``) must match the values set in the request that + generated the page token. + order_by (str): + Optional. The optional ordering of the documents to return. + + For example: ``priority desc, __name__ desc``. + + This mirrors the + [``ORDER BY``][google.firestore.v1.StructuredQuery.order_by] + used in Firestore queries but in a string representation. + When absent, documents are ordered based on + ``__name__ ASC``. + mask (google.cloud.firestore_v1.types.DocumentMask): + Optional. The fields to return. If not set, + returns all fields. + If a document has a field that is not present in + this mask, that field will not be returned in + the response. + transaction (bytes): + Perform the read as part of an already active + transaction. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Perform the read at the provided time. + + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. 
+ show_missing (bool): + If the list should show missing documents. + + A document is missing if it does not exist, but there are + sub-documents nested underneath it. When true, such missing + documents will be returned with a key but will not have + fields, + [``create_time``][google.firestore.v1.Document.create_time], + or + [``update_time``][google.firestore.v1.Document.update_time] + set. + + Requests with ``show_missing`` may not specify ``where`` or + ``order_by``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + collection_id: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=7, + message=common.DocumentMask, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=8, + oneof='consistency_selector', + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + show_missing: bool = proto.Field( + proto.BOOL, + number=12, + ) + + +class ListDocumentsResponse(proto.Message): + r"""The response for + [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. + + Attributes: + documents (MutableSequence[google.cloud.firestore_v1.types.Document]): + The Documents found. + next_page_token (str): + A token to retrieve the next page of + documents. + If this field is omitted, there are no + subsequent pages. 
+ """ + + @property + def raw_page(self): + return self + + documents: MutableSequence[gf_document.Document] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gf_document.Document, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDocumentRequest(proto.Message): + r"""The request for + [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. + + Attributes: + parent (str): + Required. The parent resource. For example: + ``projects/{project_id}/databases/{database_id}/documents`` + or + ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`` + collection_id (str): + Required. The collection ID, relative to ``parent``, to + list. For example: ``chatrooms``. + document_id (str): + The client-assigned document ID to use for + this document. + Optional. If not specified, an ID will be + assigned by the service. + document (google.cloud.firestore_v1.types.Document): + Required. The document to create. ``name`` must not be set. + mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to return. If not set, returns all + fields. + If the document has a field that is not present + in this mask, that field will not be returned in + the response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + collection_id: str = proto.Field( + proto.STRING, + number=2, + ) + document_id: str = proto.Field( + proto.STRING, + number=3, + ) + document: gf_document.Document = proto.Field( + proto.MESSAGE, + number=4, + message=gf_document.Document, + ) + mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=5, + message=common.DocumentMask, + ) + + +class UpdateDocumentRequest(proto.Message): + r"""The request for + [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. + + Attributes: + document (google.cloud.firestore_v1.types.Document): + Required. The updated document. + Creates the document if it does not already + exist. 
+ update_mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to update. + None of the field paths in the mask may contain + a reserved name. + + If the document exists on the server and has + fields not referenced in the mask, they are left + unchanged. + Fields referenced in the mask, but not present + in the input document, are deleted from the + document on the server. + mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to return. If not set, returns all + fields. + If the document has a field that is not present + in this mask, that field will not be returned in + the response. + current_document (google.cloud.firestore_v1.types.Precondition): + An optional precondition on the document. + The request will fail if this is set and not met + by the target document. + """ + + document: gf_document.Document = proto.Field( + proto.MESSAGE, + number=1, + message=gf_document.Document, + ) + update_mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=2, + message=common.DocumentMask, + ) + mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=3, + message=common.DocumentMask, + ) + current_document: common.Precondition = proto.Field( + proto.MESSAGE, + number=4, + message=common.Precondition, + ) + + +class DeleteDocumentRequest(proto.Message): + r"""The request for + [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. + + Attributes: + name (str): + Required. The resource name of the Document to delete. In + the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + current_document (google.cloud.firestore_v1.types.Precondition): + An optional precondition on the document. + The request will fail if this is set and not met + by the target document. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_document: common.Precondition = proto.Field( + proto.MESSAGE, + number=2, + message=common.Precondition, + ) + + +class BatchGetDocumentsRequest(proto.Message): + r"""The request for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + documents (MutableSequence[str]): + The names of the documents to retrieve. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + The request will fail if any of the document is not a child + resource of the given ``database``. Duplicate names will be + elided. + mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to return. If not set, returns all + fields. + If a document has a field that is not present in + this mask, that field will not be returned in + the response. + transaction (bytes): + Reads documents in a transaction. + + This field is a member of `oneof`_ ``consistency_selector``. + new_transaction (google.cloud.firestore_v1.types.TransactionOptions): + Starts a new transaction and reads the + documents. Defaults to a read-only transaction. + The new transaction ID will be returned as the + first response in the stream. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads documents as they were at the given + time. 
+ This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=3, + message=common.DocumentMask, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=4, + oneof='consistency_selector', + ) + new_transaction: common.TransactionOptions = proto.Field( + proto.MESSAGE, + number=5, + oneof='consistency_selector', + message=common.TransactionOptions, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + + +class BatchGetDocumentsResponse(proto.Message): + r"""The streamed response for + [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + found (google.cloud.firestore_v1.types.Document): + A document that was requested. + + This field is a member of `oneof`_ ``result``. + missing (str): + A document name that was requested but does not exist. In + the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This field is a member of `oneof`_ ``result``. + transaction (bytes): + The transaction that was started as part of this request. 
+ Will only be set in the first response, and only if + [BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction] + was set in the request. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the document was read. This may be + monotonically increasing, in this case the previous documents + in the result stream are guaranteed not to have changed + between their read_time and this one. + """ + + found: gf_document.Document = proto.Field( + proto.MESSAGE, + number=1, + oneof='result', + message=gf_document.Document, + ) + missing: str = proto.Field( + proto.STRING, + number=2, + oneof='result', + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=3, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class BeginTransactionRequest(proto.Message): + r"""The request for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + options (google.cloud.firestore_v1.types.TransactionOptions): + The options for the transaction. + Defaults to a read-write transaction. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + options: common.TransactionOptions = proto.Field( + proto.MESSAGE, + number=2, + message=common.TransactionOptions, + ) + + +class BeginTransactionResponse(proto.Message): + r"""The response for + [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. + + Attributes: + transaction (bytes): + The transaction that was started. + """ + + transaction: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class CommitRequest(proto.Message): + r"""The request for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + + Attributes: + database (str): + Required. The database name.
In the format: + ``projects/{project_id}/databases/{database_id}``. + writes (MutableSequence[google.cloud.firestore_v1.types.Write]): + The writes to apply. + + Always executed atomically and in order. + transaction (bytes): + If set, applies all writes in this + transaction, and commits it. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + writes: MutableSequence[write.Write] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=write.Write, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=3, + ) + + +class CommitResponse(proto.Message): + r"""The response for + [Firestore.Commit][google.firestore.v1.Firestore.Commit]. + + Attributes: + write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): + The result of applying the writes. + + This i-th write result corresponds to the i-th + write in the request. + commit_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the commit occurred. Any read with an + equal or greater ``read_time`` is guaranteed to see the + effects of the commit. + """ + + write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=write.WriteResult, + ) + commit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class RollbackRequest(proto.Message): + r"""The request for + [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + transaction (bytes): + Required. The transaction to roll back. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class RunQueryRequest(proto.Message): + r"""The request for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` + or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + structured_query (google.cloud.firestore_v1.types.StructuredQuery): + A structured query. + + This field is a member of `oneof`_ ``query_type``. + transaction (bytes): + Run the query within an already active + transaction. + The value here is the opaque transaction ID to + execute the query in. + + This field is a member of `oneof`_ ``consistency_selector``. + new_transaction (google.cloud.firestore_v1.types.TransactionOptions): + Starts a new transaction and reads the + documents. Defaults to a read-only transaction. + The new transaction ID will be returned as the + first response in the stream. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads documents as they were at the given + time. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. + explain_options (google.cloud.firestore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. If not, only query results will be + returned. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + structured_query: gf_query.StructuredQuery = proto.Field( + proto.MESSAGE, + number=2, + oneof='query_type', + message=gf_query.StructuredQuery, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=5, + oneof='consistency_selector', + ) + new_transaction: common.TransactionOptions = proto.Field( + proto.MESSAGE, + number=6, + oneof='consistency_selector', + message=common.TransactionOptions, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=10, + message=query_profile.ExplainOptions, + ) + + +class RunQueryResponse(proto.Message): + r"""The response for + [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transaction (bytes): + The transaction that was started as part of this request. + Can only be set in the first response, and only if + [RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction] + was set in the request. If set, no other fields will be set + in this response. + document (google.cloud.firestore_v1.types.Document): + A query result, not set when reporting + partial progress. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the document was read. This may be + monotonically increasing; in this case, the previous + documents in the result stream are guaranteed not to have + changed between their ``read_time`` and this one. + + If the query returns no results, a response with + ``read_time`` and no ``document`` will be sent, and this + represents the time at which the query was run. 
+ skipped_results (int): + The number of results that have been skipped + due to an offset between the last response and + the current response. + done (bool): + If present, Firestore has completely finished + the request and no more documents will be + returned. + + This field is a member of `oneof`_ ``continuation_selector``. + explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): + Query explain metrics. This is only present when the + [RunQueryRequest.explain_options][google.firestore.v1.RunQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. + """ + + transaction: bytes = proto.Field( + proto.BYTES, + number=2, + ) + document: gf_document.Document = proto.Field( + proto.MESSAGE, + number=1, + message=gf_document.Document, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + skipped_results: int = proto.Field( + proto.INT32, + number=4, + ) + done: bool = proto.Field( + proto.BOOL, + number=6, + oneof='continuation_selector', + ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=11, + message=query_profile.ExplainMetrics, + ) + + +class RunAggregationQueryRequest(proto.Message): + r"""The request for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` + or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
+ For example: + ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + structured_aggregation_query (google.cloud.firestore_v1.types.StructuredAggregationQuery): + An aggregation query. + + This field is a member of `oneof`_ ``query_type``. + transaction (bytes): + Run the aggregation within an already active + transaction. + The value here is the opaque transaction ID to + execute the query in. + + This field is a member of `oneof`_ ``consistency_selector``. + new_transaction (google.cloud.firestore_v1.types.TransactionOptions): + Starts a new transaction as part of the + query, defaulting to read-only. + The new transaction ID will be returned as the + first response in the stream. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Executes the query at the given timestamp. + + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. + explain_options (google.cloud.firestore_v1.types.ExplainOptions): + Optional. Explain options for the query. If + set, additional query statistics will be + returned. If not, only query results will be + returned. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + structured_aggregation_query: gf_query.StructuredAggregationQuery = proto.Field( + proto.MESSAGE, + number=2, + oneof='query_type', + message=gf_query.StructuredAggregationQuery, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=4, + oneof='consistency_selector', + ) + new_transaction: common.TransactionOptions = proto.Field( + proto.MESSAGE, + number=5, + oneof='consistency_selector', + message=common.TransactionOptions, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + explain_options: query_profile.ExplainOptions = proto.Field( + proto.MESSAGE, + number=8, + message=query_profile.ExplainOptions, + ) + + +class RunAggregationQueryResponse(proto.Message): + r"""The response for + [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. + + Attributes: + result (google.cloud.firestore_v1.types.AggregationResult): + A single aggregation result. + + Not present when reporting partial progress. + transaction (bytes): + The transaction that was started as part of + this request. + Only present on the first response when the + request requested to start a new transaction. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the aggregate result was computed. This is + always monotonically increasing; in this case, the previous + AggregationResult in the result stream are guaranteed not to + have changed between their ``read_time`` and this one. + + If the query returns no results, a response with + ``read_time`` and no ``result`` will be sent, and this + represents the time at which the query was run. + explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): + Query explain metrics. 
This is only present when the + [RunAggregationQueryRequest.explain_options][google.firestore.v1.RunAggregationQueryRequest.explain_options] + is provided, and it is sent only once with the last response + in the stream. + """ + + result: aggregation_result.AggregationResult = proto.Field( + proto.MESSAGE, + number=1, + message=aggregation_result.AggregationResult, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=2, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + explain_metrics: query_profile.ExplainMetrics = proto.Field( + proto.MESSAGE, + number=10, + message=query_profile.ExplainMetrics, + ) + + +class PartitionQueryRequest(proto.Message): + r"""The request for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents``. + Document resource names are not supported; only database + resource names can be specified. + structured_query (google.cloud.firestore_v1.types.StructuredQuery): + A structured query. + Query must specify collection with all + descendants and be ordered by name ascending. + Other filters, order bys, limits, offsets, and + start/end cursors are not supported. + + This field is a member of `oneof`_ ``query_type``. + partition_count (int): + The desired maximum number of partition + points. The partitions may be returned across + multiple pages of results. The number must be + positive. The actual number of partitions + returned may be fewer. + + For example, this may be set to one fewer than + the number of parallel queries to be run, or in + running a data pipeline job, one fewer than the + number of workers or compute instances + available. 
+ page_token (str): + The ``next_page_token`` value returned from a previous call + to PartitionQuery that may be used to get an additional set + of results. There are no ordering guarantees between sets of + results. Thus, using multiple sets of results will require + merging the different result sets. + + For example, two subsequent calls using a page_token may + return: + + - cursor B, cursor M, cursor Q + - cursor A, cursor U, cursor W + + To obtain a complete result set ordered with respect to the + results of the query supplied to PartitionQuery, the results + sets should be merged: cursor A, cursor B, cursor M, cursor + Q, cursor U, cursor W + page_size (int): + The maximum number of partitions to return in this call, + subject to ``partition_count``. + + For example, if ``partition_count`` = 10 and ``page_size`` = + 8, the first call to PartitionQuery will return up to 8 + partitions and a ``next_page_token`` if more results exist. + A second call to PartitionQuery will return up to 2 + partitions, to complete the total of 10 specified in + ``partition_count``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads documents as they were at the given + time. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + structured_query: gf_query.StructuredQuery = proto.Field( + proto.MESSAGE, + number=2, + oneof='query_type', + message=gf_query.StructuredQuery, + ) + partition_count: int = proto.Field( + proto.INT64, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + + +class PartitionQueryResponse(proto.Message): + r"""The response for + [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. + + Attributes: + partitions (MutableSequence[google.cloud.firestore_v1.types.Cursor]): + Partition results. Each partition is a split point that can + be used by RunQuery as a starting or end point for the query + results. The RunQuery requests must be made with the same + query supplied to this PartitionQuery request. The partition + cursors will be ordered according to same ordering as the + results of the query supplied to PartitionQuery. + + For example, if a PartitionQuery request returns partition + cursors A and B, running the following three queries will + return the entire result set of the original query: + + - query, end_at A + - query, start_at A, end_at B + - query, start_at B + + An empty result may indicate that the query has too few + results to be partitioned, or that the query is not yet + supported for partitioning. + next_page_token (str): + A page token that may be used to request an additional set + of results, up to the number specified by + ``partition_count`` in the PartitionQuery request. If blank, + there are no more results. 
+ """ + + @property + def raw_page(self): + return self + + partitions: MutableSequence[gf_query.Cursor] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gf_query.Cursor, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class WriteRequest(proto.Message): + r"""The request for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + The first request creates a stream, or resumes an existing one from + a token. + + When creating a new stream, the server replies with a response + containing only an ID and a token, to use in the next request. + + When resuming a stream, the server first streams any responses later + than the given token, then a response containing only an up-to-date + token, to use in the next request. + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. This is + only required in the first message. + stream_id (str): + The ID of the write stream to resume. + This may only be set in the first message. When + left empty, a new write stream will be created. + writes (MutableSequence[google.cloud.firestore_v1.types.Write]): + The writes to apply. + + Always executed atomically and in order. + This must be empty on the first request. + This may be empty on the last request. + This must not be empty on all other requests. + stream_token (bytes): + A stream token that was previously sent by the server. + + The client should set this field to the token from the most + recent [WriteResponse][google.firestore.v1.WriteResponse] it + has received. This acknowledges that the client has received + responses up to this token. After sending this token, + earlier tokens may not be used anymore. + + The server may close the stream if there are too many + unacknowledged responses. + + Leave this field unset when creating a new stream. To resume + a stream at a specific point, set this field and the + ``stream_id`` field. 
+ + Leave this field unset when creating a new stream. + labels (MutableMapping[str, str]): + Labels associated with this write request. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + stream_id: str = proto.Field( + proto.STRING, + number=2, + ) + writes: MutableSequence[write.Write] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=write.Write, + ) + stream_token: bytes = proto.Field( + proto.BYTES, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class WriteResponse(proto.Message): + r"""The response for + [Firestore.Write][google.firestore.v1.Firestore.Write]. + + Attributes: + stream_id (str): + The ID of the stream. + Only set on the first message, when a new stream + was created. + stream_token (bytes): + A token that represents the position of this + response in the stream. This can be used by a + client to resume the stream at this point. + + This field is always set. + write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): + The result of applying the writes. + + This i-th write result corresponds to the i-th + write in the request. + commit_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the commit occurred. Any read with an + equal or greater ``read_time`` is guaranteed to see the + effects of the write. + """ + + stream_id: str = proto.Field( + proto.STRING, + number=1, + ) + stream_token: bytes = proto.Field( + proto.BYTES, + number=2, + ) + write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=write.WriteResult, + ) + commit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class ListenRequest(proto.Message): + r"""A request for + [Firestore.Listen][google.firestore.v1.Firestore.Listen] + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + add_target (google.cloud.firestore_v1.types.Target): + A target to add to this stream. + + This field is a member of `oneof`_ ``target_change``. + remove_target (int): + The ID of a target to remove from this + stream. + + This field is a member of `oneof`_ ``target_change``. + labels (MutableMapping[str, str]): + Labels associated with this target change. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + add_target: 'Target' = proto.Field( + proto.MESSAGE, + number=2, + oneof='target_change', + message='Target', + ) + remove_target: int = proto.Field( + proto.INT32, + number=3, + oneof='target_change', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class ListenResponse(proto.Message): + r"""The response for + [Firestore.Listen][google.firestore.v1.Firestore.Listen]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_change (google.cloud.firestore_v1.types.TargetChange): + Targets have changed. + + This field is a member of `oneof`_ ``response_type``. + document_change (google.cloud.firestore_v1.types.DocumentChange): + A [Document][google.firestore.v1.Document] has changed. + + This field is a member of `oneof`_ ``response_type``. 
+ document_delete (google.cloud.firestore_v1.types.DocumentDelete): + A [Document][google.firestore.v1.Document] has been deleted. + + This field is a member of `oneof`_ ``response_type``. + document_remove (google.cloud.firestore_v1.types.DocumentRemove): + A [Document][google.firestore.v1.Document] has been removed + from a target (because it is no longer relevant to that + target). + + This field is a member of `oneof`_ ``response_type``. + filter (google.cloud.firestore_v1.types.ExistenceFilter): + A filter to apply to the set of documents + previously returned for the given target. + + Returned when documents may have been removed + from the given target, but the exact documents + are unknown. + + This field is a member of `oneof`_ ``response_type``. + """ + + target_change: 'TargetChange' = proto.Field( + proto.MESSAGE, + number=2, + oneof='response_type', + message='TargetChange', + ) + document_change: write.DocumentChange = proto.Field( + proto.MESSAGE, + number=3, + oneof='response_type', + message=write.DocumentChange, + ) + document_delete: write.DocumentDelete = proto.Field( + proto.MESSAGE, + number=4, + oneof='response_type', + message=write.DocumentDelete, + ) + document_remove: write.DocumentRemove = proto.Field( + proto.MESSAGE, + number=6, + oneof='response_type', + message=write.DocumentRemove, + ) + filter: write.ExistenceFilter = proto.Field( + proto.MESSAGE, + number=5, + oneof='response_type', + message=write.ExistenceFilter, + ) + + +class Target(proto.Message): + r"""A specification of a set of documents to listen to. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query (google.cloud.firestore_v1.types.Target.QueryTarget): + A target specified by a query. + + This field is a member of `oneof`_ ``target_type``. + documents (google.cloud.firestore_v1.types.Target.DocumentsTarget): + A target specified by a set of document + names. + + This field is a member of `oneof`_ ``target_type``. + resume_token (bytes): + A resume token from a prior + [TargetChange][google.firestore.v1.TargetChange] for an + identical target. + + Using a resume token with a different target is unsupported + and may fail. + + This field is a member of `oneof`_ ``resume_type``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Start listening after a specific ``read_time``. + + The client must know the state of matching documents at this + time. + + This field is a member of `oneof`_ ``resume_type``. + target_id (int): + The target ID that identifies the target on the stream. Must + be a positive number and non-zero. + + If ``target_id`` is 0 (or unspecified), the server will + assign an ID for this target and return that in a + ``TargetChange::ADD`` event. Once a target with + ``target_id=0`` is added, all subsequent targets must also + have ``target_id=0``. If an ``AddTarget`` request with + ``target_id != 0`` is sent to the server after a target with + ``target_id=0`` is added, the server will immediately send a + response with a ``TargetChange::Remove`` event. + + Note that if the client sends multiple ``AddTarget`` + requests without an ID, the order of IDs returned in + ``TargetChange.target_ids`` are undefined. Therefore, clients + should provide a target ID instead of relying on the server + to assign one. + + If ``target_id`` is non-zero, there must not be an existing + active target on this stream with the same ID. + once (bool): + If the target should be removed once it is + current and consistent.
+ expected_count (google.protobuf.wrappers_pb2.Int32Value): + The number of documents that last matched the query at the + resume token or read time. + + This value is only relevant when a ``resume_type`` is + provided. This value being present and greater than zero + signals that the client wants + ``ExistenceFilter.unchanged_names`` to be included in the + response. + """ + + class DocumentsTarget(proto.Message): + r"""A target specified by a set of documents names. + + Attributes: + documents (MutableSequence[str]): + The names of the documents to retrieve. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + The request will fail if any of the document is not a child + resource of the given ``database``. Duplicate names will be + elided. + """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class QueryTarget(proto.Message): + r"""A target specified by a query. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` + or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + structured_query (google.cloud.firestore_v1.types.StructuredQuery): + A structured query. + + This field is a member of `oneof`_ ``query_type``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + structured_query: gf_query.StructuredQuery = proto.Field( + proto.MESSAGE, + number=2, + oneof='query_type', + message=gf_query.StructuredQuery, + ) + + query: QueryTarget = proto.Field( + proto.MESSAGE, + number=2, + oneof='target_type', + message=QueryTarget, + ) + documents: DocumentsTarget = proto.Field( + proto.MESSAGE, + number=3, + oneof='target_type', + message=DocumentsTarget, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=4, + oneof='resume_type', + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + oneof='resume_type', + message=timestamp_pb2.Timestamp, + ) + target_id: int = proto.Field( + proto.INT32, + number=5, + ) + once: bool = proto.Field( + proto.BOOL, + number=6, + ) + expected_count: wrappers_pb2.Int32Value = proto.Field( + proto.MESSAGE, + number=12, + message=wrappers_pb2.Int32Value, + ) + + +class TargetChange(proto.Message): + r"""Targets being watched have changed. + + Attributes: + target_change_type (google.cloud.firestore_v1.types.TargetChange.TargetChangeType): + The type of change that occurred. + target_ids (MutableSequence[int]): + The target IDs of targets that have changed. + + If empty, the change applies to all targets. + + The order of the target IDs is not defined. + cause (google.rpc.status_pb2.Status): + The error that resulted in this change, if + applicable. + resume_token (bytes): + A token that can be used to resume the stream for the given + ``target_ids``, or all targets if ``target_ids`` is empty. + + Not set on every target change. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The consistent ``read_time`` for the given ``target_ids`` + (omitted when the target_ids are not at a consistent + snapshot). + + The stream is guaranteed to send a ``read_time`` with + ``target_ids`` empty whenever the entire stream reaches a + new consistent snapshot. 
ADD, CURRENT, and RESET messages + are guaranteed to (eventually) result in a new consistent + snapshot (while NO_CHANGE and REMOVE messages are not). + + For a given stream, ``read_time`` is guaranteed to be + monotonically increasing. + """ + class TargetChangeType(proto.Enum): + r"""The type of change. + + Values: + NO_CHANGE (0): + No change has occurred. Used only to send an updated + ``resume_token``. + ADD (1): + The targets have been added. + REMOVE (2): + The targets have been removed. + CURRENT (3): + The targets reflect all changes committed before the targets + were added to the stream. + + This will be sent after or with a ``read_time`` that is + greater than or equal to the time at which the targets were + added. + + Listeners can wait for this change if read-after-write + semantics are desired. + RESET (4): + The targets have been reset, and a new initial state for the + targets will be returned in subsequent changes. + + After the initial state is complete, ``CURRENT`` will be + returned even if the target was previously indicated to be + ``CURRENT``. + """ + NO_CHANGE = 0 + ADD = 1 + REMOVE = 2 + CURRENT = 3 + RESET = 4 + + target_change_type: TargetChangeType = proto.Field( + proto.ENUM, + number=1, + enum=TargetChangeType, + ) + target_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + cause: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=4, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class ListCollectionIdsRequest(proto.Message): + r"""The request for + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent document. 
In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + page_size (int): + The maximum number of results to return. + page_token (str): + A page token. Must be a value from + [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads documents as they were at the given + time. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + oneof='consistency_selector', + message=timestamp_pb2.Timestamp, + ) + + +class ListCollectionIdsResponse(proto.Message): + r"""The response from + [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. + + Attributes: + collection_ids (MutableSequence[str]): + The collection ids. + next_page_token (str): + A page token that may be used to continue the + list. + """ + + @property + def raw_page(self): + return self + + collection_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BatchWriteRequest(proto.Message): + r"""The request for + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + + Attributes: + database (str): + Required. The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + writes (MutableSequence[google.cloud.firestore_v1.types.Write]): + The writes to apply. 
+ + Method does not apply writes atomically and does + not guarantee ordering. Each write succeeds or + fails independently. You cannot write to the + same document more than once per request. + labels (MutableMapping[str, str]): + Labels associated with this batch write. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + writes: MutableSequence[write.Write] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=write.Write, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class BatchWriteResponse(proto.Message): + r"""The response from + [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. + + Attributes: + write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): + The result of applying the writes. + + This i-th write result corresponds to the i-th + write in the request. + status (MutableSequence[google.rpc.status_pb2.Status]): + The status of applying the writes. + + This i-th write status corresponds to the i-th + write in the request. + """ + + write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=write.WriteResult, + ) + status: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py new file mode 100644 index 0000000000..9f3f4615bb --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py @@ -0,0 +1,875 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_v1.types import document +from google.protobuf import wrappers_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'StructuredQuery', + 'StructuredAggregationQuery', + 'Cursor', + }, +) + + +class StructuredQuery(proto.Message): + r"""A Firestore query. + + The query stages are executed in the following order: + + 1. from + 2. where + 3. select + 4. order_by + start_at + end_at + 5. offset + 6. limit + + Attributes: + select (google.cloud.firestore_v1.types.StructuredQuery.Projection): + Optional sub-set of the fields to return. + + This acts as a + [DocumentMask][google.firestore.v1.DocumentMask] over the + documents returned from a query. When not set, assumes that + the caller wants all fields returned. + from_ (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.CollectionSelector]): + The collections to query. + where (google.cloud.firestore_v1.types.StructuredQuery.Filter): + The filter to apply. + order_by (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Order]): + The order to apply to the query results. + + Firestore allows callers to provide a full ordering, a + partial ordering, or no ordering at all. In all cases, + Firestore guarantees a stable ordering through the following + rules: + + - The ``order_by`` is required to reference all fields used + with an inequality filter. 
+ - All fields that are required to be in the ``order_by`` + but are not already present are appended in + lexicographical ordering of the field name. + - If an order on ``__name__`` is not specified, it is + appended by default. + + Fields are appended with the same sort direction as the last + order specified, or 'ASCENDING' if no order was specified. + For example: + + - ``ORDER BY a`` becomes ``ORDER BY a ASC, __name__ ASC`` + - ``ORDER BY a DESC`` becomes + ``ORDER BY a DESC, __name__ DESC`` + - ``WHERE a > 1`` becomes + ``WHERE a > 1 ORDER BY a ASC, __name__ ASC`` + - ``WHERE __name__ > ... AND a > 1`` becomes + ``WHERE __name__ > ... AND a > 1 ORDER BY a ASC, __name__ ASC`` + start_at (google.cloud.firestore_v1.types.Cursor): + A potential prefix of a position in the result set to start + the query at. + + The ordering of the result set is based on the ``ORDER BY`` + clause of the original query. + + :: + + SELECT * FROM k WHERE a = 1 AND b > 2 ORDER BY b ASC, __name__ ASC; + + This query's results are ordered by + ``(b ASC, __name__ ASC)``. + + Cursors can reference either the full ordering or a prefix + of the location, though it cannot reference more fields than + what are in the provided ``ORDER BY``. + + Continuing off the example above, attaching the following + start cursors will have varying impact: + + - ``START BEFORE (2, /k/123)``: start the query right + before ``a = 1 AND b > 2 AND __name__ > /k/123``. + - ``START AFTER (10)``: start the query right after + ``a = 1 AND b > 10``. + + Unlike ``OFFSET`` which requires scanning over the first N + results to skip, a start cursor allows the query to begin at + a logical position. This position is not required to match + an actual result, it will scan forward from this position to + find the next document. + + Requires: + + - The number of values cannot be greater than the number of + fields specified in the ``ORDER BY`` clause. 
+ end_at (google.cloud.firestore_v1.types.Cursor): + A potential prefix of a position in the result set to end + the query at. + + This is similar to ``START_AT`` but with it controlling the + end position rather than the start position. + + Requires: + + - The number of values cannot be greater than the number of + fields specified in the ``ORDER BY`` clause. + offset (int): + The number of documents to skip before returning the first + result. + + This applies after the constraints specified by the + ``WHERE``, ``START AT``, & ``END AT`` but before the + ``LIMIT`` clause. + + Requires: + + - The value must be greater than or equal to zero if + specified. + limit (google.protobuf.wrappers_pb2.Int32Value): + The maximum number of results to return. + + Applies after all other constraints. + + Requires: + + - The value must be greater than or equal to zero if + specified. + find_nearest (google.cloud.firestore_v1.types.StructuredQuery.FindNearest): + Optional. A potential Nearest Neighbors + Search. + Applies after all other filters and ordering. + + Finds the closest vector embeddings to the given + query vector. + """ + class Direction(proto.Enum): + r"""A sort direction. + + Values: + DIRECTION_UNSPECIFIED (0): + Unspecified. + ASCENDING (1): + Ascending. + DESCENDING (2): + Descending. + """ + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + class CollectionSelector(proto.Message): + r"""A selection of a collection, such as ``messages as m1``. + + Attributes: + collection_id (str): + The collection ID. + When set, selects only collections with this ID. + all_descendants (bool): + When false, selects only collections that are immediate + children of the ``parent`` specified in the containing + ``RunQueryRequest``. When true, selects all descendant + collections. 
+ """ + + collection_id: str = proto.Field( + proto.STRING, + number=2, + ) + all_descendants: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class Filter(proto.Message): + r"""A filter. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + composite_filter (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter): + A composite filter. + + This field is a member of `oneof`_ ``filter_type``. + field_filter (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter): + A filter on a document field. + + This field is a member of `oneof`_ ``filter_type``. + unary_filter (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter): + A filter that takes exactly one argument. + + This field is a member of `oneof`_ ``filter_type``. + """ + + composite_filter: 'StructuredQuery.CompositeFilter' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter_type', + message='StructuredQuery.CompositeFilter', + ) + field_filter: 'StructuredQuery.FieldFilter' = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter_type', + message='StructuredQuery.FieldFilter', + ) + unary_filter: 'StructuredQuery.UnaryFilter' = proto.Field( + proto.MESSAGE, + number=3, + oneof='filter_type', + message='StructuredQuery.UnaryFilter', + ) + + class CompositeFilter(proto.Message): + r"""A filter that merges multiple other filters using the given + operator. + + Attributes: + op (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter.Operator): + The operator for combining multiple filters. + filters (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Filter]): + The list of filters to combine. + + Requires: + + - At least one filter is present. 
+ """ + class Operator(proto.Enum): + r"""A composite filter operator. + + Values: + OPERATOR_UNSPECIFIED (0): + Unspecified. This value must not be used. + AND (1): + Documents are required to satisfy all of the + combined filters. + OR (2): + Documents are required to satisfy at least + one of the combined filters. + """ + OPERATOR_UNSPECIFIED = 0 + AND = 1 + OR = 2 + + op: 'StructuredQuery.CompositeFilter.Operator' = proto.Field( + proto.ENUM, + number=1, + enum='StructuredQuery.CompositeFilter.Operator', + ) + filters: MutableSequence['StructuredQuery.Filter'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='StructuredQuery.Filter', + ) + + class FieldFilter(proto.Message): + r"""A filter on a specific field. + + Attributes: + field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + The field to filter by. + op (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter.Operator): + The operator to filter by. + value (google.cloud.firestore_v1.types.Value): + The value to compare to. + """ + class Operator(proto.Enum): + r"""A field filter operator. + + Values: + OPERATOR_UNSPECIFIED (0): + Unspecified. This value must not be used. + LESS_THAN (1): + The given ``field`` is less than the given ``value``. + + Requires: + + - That ``field`` come first in ``order_by``. + LESS_THAN_OR_EQUAL (2): + The given ``field`` is less than or equal to the given + ``value``. + + Requires: + + - That ``field`` come first in ``order_by``. + GREATER_THAN (3): + The given ``field`` is greater than the given ``value``. + + Requires: + + - That ``field`` come first in ``order_by``. + GREATER_THAN_OR_EQUAL (4): + The given ``field`` is greater than or equal to the given + ``value``. + + Requires: + + - That ``field`` come first in ``order_by``. + EQUAL (5): + The given ``field`` is equal to the given ``value``. + NOT_EQUAL (6): + The given ``field`` is not equal to the given ``value``. 
+ + Requires: + + - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or + ``IS_NOT_NAN``. + - That ``field`` comes first in the ``order_by``. + ARRAY_CONTAINS (7): + The given ``field`` is an array that contains the given + ``value``. + IN (8): + The given ``field`` is equal to at least one value in the + given array. + + Requires: + + - That ``value`` is a non-empty ``ArrayValue``, subject to + disjunction limits. + - No ``NOT_IN`` filters in the same query. + ARRAY_CONTAINS_ANY (9): + The given ``field`` is an array that contains any of the + values in the given array. + + Requires: + + - That ``value`` is a non-empty ``ArrayValue``, subject to + disjunction limits. + - No other ``ARRAY_CONTAINS_ANY`` filters within the same + disjunction. + - No ``NOT_IN`` filters in the same query. + NOT_IN (10): + The value of the ``field`` is not in the given array. + + Requires: + + - That ``value`` is a non-empty ``ArrayValue`` with at most + 10 values. + - No other ``OR``, ``IN``, ``ARRAY_CONTAINS_ANY``, + ``NOT_IN``, ``NOT_EQUAL``, ``IS_NOT_NULL``, or + ``IS_NOT_NAN``. + - That ``field`` comes first in the ``order_by``. + """ + OPERATOR_UNSPECIFIED = 0 + LESS_THAN = 1 + LESS_THAN_OR_EQUAL = 2 + GREATER_THAN = 3 + GREATER_THAN_OR_EQUAL = 4 + EQUAL = 5 + NOT_EQUAL = 6 + ARRAY_CONTAINS = 7 + IN = 8 + ARRAY_CONTAINS_ANY = 9 + NOT_IN = 10 + + field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=1, + message='StructuredQuery.FieldReference', + ) + op: 'StructuredQuery.FieldFilter.Operator' = proto.Field( + proto.ENUM, + number=2, + enum='StructuredQuery.FieldFilter.Operator', + ) + value: document.Value = proto.Field( + proto.MESSAGE, + number=3, + message=document.Value, + ) + + class UnaryFilter(proto.Message): + r"""A filter with a single operand. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + op (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter.Operator): + The unary operator to apply. + field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + The field to which to apply the operator. + + This field is a member of `oneof`_ ``operand_type``. + """ + class Operator(proto.Enum): + r"""A unary operator. + + Values: + OPERATOR_UNSPECIFIED (0): + Unspecified. This value must not be used. + IS_NAN (2): + The given ``field`` is equal to ``NaN``. + IS_NULL (3): + The given ``field`` is equal to ``NULL``. + IS_NOT_NAN (4): + The given ``field`` is not equal to ``NaN``. + + Requires: + + - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or + ``IS_NOT_NAN``. + - That ``field`` comes first in the ``order_by``. + IS_NOT_NULL (5): + The given ``field`` is not equal to ``NULL``. + + Requires: + + - A single ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or + ``IS_NOT_NAN``. + - That ``field`` comes first in the ``order_by``. + """ + OPERATOR_UNSPECIFIED = 0 + IS_NAN = 2 + IS_NULL = 3 + IS_NOT_NAN = 4 + IS_NOT_NULL = 5 + + op: 'StructuredQuery.UnaryFilter.Operator' = proto.Field( + proto.ENUM, + number=1, + enum='StructuredQuery.UnaryFilter.Operator', + ) + field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=2, + oneof='operand_type', + message='StructuredQuery.FieldReference', + ) + + class Order(proto.Message): + r"""An order on a field. + + Attributes: + field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + The field to order by. + direction (google.cloud.firestore_v1.types.StructuredQuery.Direction): + The direction to order by. Defaults to ``ASCENDING``. 
+ """ + + field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=1, + message='StructuredQuery.FieldReference', + ) + direction: 'StructuredQuery.Direction' = proto.Field( + proto.ENUM, + number=2, + enum='StructuredQuery.Direction', + ) + + class FieldReference(proto.Message): + r"""A reference to a field in a document, ex: ``stats.operations``. + + Attributes: + field_path (str): + A reference to a field in a document. + + Requires: + + - MUST be a dot-delimited (``.``) string of segments, where + each segment conforms to [document field + name][google.firestore.v1.Document.fields] limitations. + """ + + field_path: str = proto.Field( + proto.STRING, + number=2, + ) + + class Projection(proto.Message): + r"""The projection of document's fields to return. + + Attributes: + fields (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.FieldReference]): + The fields to return. + + If empty, all fields are returned. To only return the name + of the document, use ``['__name__']``. + """ + + fields: MutableSequence['StructuredQuery.FieldReference'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='StructuredQuery.FieldReference', + ) + + class FindNearest(proto.Message): + r"""Nearest Neighbors search config. + + Attributes: + vector_field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + Required. An indexed vector field to search upon. Only + documents which contain vectors whose dimensionality match + the query_vector can be returned. + query_vector (google.cloud.firestore_v1.types.Value): + Required. The query vector that we are + searching on. Must be a vector of no more than + 2048 dimensions. + distance_measure (google.cloud.firestore_v1.types.StructuredQuery.FindNearest.DistanceMeasure): + Required. The Distance Measure to use, + required. + limit (google.protobuf.wrappers_pb2.Int32Value): + Required. The number of nearest neighbors to + return. Must be a positive integer of no more + than 1000. 
+ """ + class DistanceMeasure(proto.Enum): + r"""The distance measure to use when comparing vectors. + + Values: + DISTANCE_MEASURE_UNSPECIFIED (0): + Should not be set. + EUCLIDEAN (1): + Measures the EUCLIDEAN distance between the vectors. See + `Euclidean `__ + to learn more + COSINE (2): + Compares vectors based on the angle between them, which + allows you to measure similarity that isn't based on the + vectors magnitude. We recommend using DOT_PRODUCT with unit + normalized vectors instead of COSINE distance, which is + mathematically equivalent with better performance. See + `Cosine + Similarity `__ + to learn more. + DOT_PRODUCT (3): + Similar to cosine but is affected by the magnitude of the + vectors. See `Dot + Product `__ to + learn more. + """ + DISTANCE_MEASURE_UNSPECIFIED = 0 + EUCLIDEAN = 1 + COSINE = 2 + DOT_PRODUCT = 3 + + vector_field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=1, + message='StructuredQuery.FieldReference', + ) + query_vector: document.Value = proto.Field( + proto.MESSAGE, + number=2, + message=document.Value, + ) + distance_measure: 'StructuredQuery.FindNearest.DistanceMeasure' = proto.Field( + proto.ENUM, + number=3, + enum='StructuredQuery.FindNearest.DistanceMeasure', + ) + limit: wrappers_pb2.Int32Value = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int32Value, + ) + + select: Projection = proto.Field( + proto.MESSAGE, + number=1, + message=Projection, + ) + from_: MutableSequence[CollectionSelector] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CollectionSelector, + ) + where: Filter = proto.Field( + proto.MESSAGE, + number=3, + message=Filter, + ) + order_by: MutableSequence[Order] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Order, + ) + start_at: 'Cursor' = proto.Field( + proto.MESSAGE, + number=7, + message='Cursor', + ) + end_at: 'Cursor' = proto.Field( + proto.MESSAGE, + number=8, + message='Cursor', + ) + offset: int = proto.Field( + 
proto.INT32, + number=6, + ) + limit: wrappers_pb2.Int32Value = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers_pb2.Int32Value, + ) + find_nearest: FindNearest = proto.Field( + proto.MESSAGE, + number=9, + message=FindNearest, + ) + + +class StructuredAggregationQuery(proto.Message): + r"""Firestore query for running an aggregation over a + [StructuredQuery][google.firestore.v1.StructuredQuery]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + structured_query (google.cloud.firestore_v1.types.StructuredQuery): + Nested structured query. + + This field is a member of `oneof`_ ``query_type``. + aggregations (MutableSequence[google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation]): + Optional. Series of aggregations to apply over the results + of the ``structured_query``. + + Requires: + + - A minimum of one and maximum of five aggregations per + query. + """ + + class Aggregation(proto.Message): + r"""Defines an aggregation that produces a single result. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + count (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Count): + Count aggregator. + + This field is a member of `oneof`_ ``operator``. + sum (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Sum): + Sum aggregator. + + This field is a member of `oneof`_ ``operator``. + avg (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Avg): + Average aggregator. + + This field is a member of `oneof`_ ``operator``. + alias (str): + Optional. Optional name of the field to store the result of + the aggregation into. 
+ + If not provided, Firestore will pick a default name + following the format ``field_``. For + example: + + :: + + AGGREGATE + COUNT_UP_TO(1) AS count_up_to_1, + COUNT_UP_TO(2), + COUNT_UP_TO(3) AS count_up_to_3, + COUNT(*) + OVER ( + ... + ); + + becomes: + + :: + + AGGREGATE + COUNT_UP_TO(1) AS count_up_to_1, + COUNT_UP_TO(2) AS field_1, + COUNT_UP_TO(3) AS count_up_to_3, + COUNT(*) AS field_2 + OVER ( + ... + ); + + Requires: + + - Must be unique across all aggregation aliases. + - Conform to [document field + name][google.firestore.v1.Document.fields] limitations. + """ + + class Count(proto.Message): + r"""Count of documents that match the query. + + The ``COUNT(*)`` aggregation function operates on the entire + document so it does not require a field reference. + + Attributes: + up_to (google.protobuf.wrappers_pb2.Int64Value): + Optional. Optional constraint on the maximum number of + documents to count. + + This provides a way to set an upper bound on the number of + documents to scan, limiting latency, and cost. + + Unspecified is interpreted as no bound. + + High-Level Example: + + :: + + AGGREGATE COUNT_UP_TO(1000) OVER ( SELECT * FROM k ); + + Requires: + + - Must be greater than zero when present. + """ + + up_to: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.Int64Value, + ) + + class Sum(proto.Message): + r"""Sum of the values of the requested field. + + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. + + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. + + - If the aggregated value set is empty, returns 0. + + - Returns a 64-bit integer if all aggregated numbers are integers + and the sum result does not overflow. Otherwise, the result is + returned as a double. 
Note that even if all the aggregated values + are integers, the result is returned as a double if it cannot fit + within a 64-bit signed integer. When this occurs, the returned + value will lose precision. + + - When underflow occurs, floating-point aggregation is + non-deterministic. This means that running the same query + repeatedly without any changes to the underlying values could + produce slightly different results each time. In those cases, + values should be stored as integers over floating-point numbers. + + Attributes: + field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + The field to aggregate on. + """ + + field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=1, + message='StructuredQuery.FieldReference', + ) + + class Avg(proto.Message): + r"""Average of the values of the requested field. + + - Only numeric values will be aggregated. All non-numeric values + including ``NULL`` are skipped. + + - If the aggregated values contain ``NaN``, returns ``NaN``. + Infinity math follows IEEE-754 standards. + + - If the aggregated value set is empty, returns ``NULL``. + + - Always returns the result as a double. + + Attributes: + field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): + The field to aggregate on. 
+ """ + + field: 'StructuredQuery.FieldReference' = proto.Field( + proto.MESSAGE, + number=1, + message='StructuredQuery.FieldReference', + ) + + count: 'StructuredAggregationQuery.Aggregation.Count' = proto.Field( + proto.MESSAGE, + number=1, + oneof='operator', + message='StructuredAggregationQuery.Aggregation.Count', + ) + sum: 'StructuredAggregationQuery.Aggregation.Sum' = proto.Field( + proto.MESSAGE, + number=2, + oneof='operator', + message='StructuredAggregationQuery.Aggregation.Sum', + ) + avg: 'StructuredAggregationQuery.Aggregation.Avg' = proto.Field( + proto.MESSAGE, + number=3, + oneof='operator', + message='StructuredAggregationQuery.Aggregation.Avg', + ) + alias: str = proto.Field( + proto.STRING, + number=7, + ) + + structured_query: 'StructuredQuery' = proto.Field( + proto.MESSAGE, + number=1, + oneof='query_type', + message='StructuredQuery', + ) + aggregations: MutableSequence[Aggregation] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Aggregation, + ) + + +class Cursor(proto.Message): + r"""A position in a query result set. + + Attributes: + values (MutableSequence[google.cloud.firestore_v1.types.Value]): + The values that represent a position, in the + order they appear in the order by clause of a + query. + + Can contain fewer values than specified in the + order by clause. + before (bool): + If the position is just before or just after + the given values, relative to the sort order + defined by the query. 
+ """ + + values: MutableSequence[document.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=document.Value, + ) + before: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py new file mode 100644 index 0000000000..201815ecf6 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'ExplainOptions', + 'ExplainMetrics', + 'PlanSummary', + 'ExecutionStats', + }, +) + + +class ExplainOptions(proto.Message): + r"""Explain options for the query. + + Attributes: + analyze (bool): + Optional. Whether to execute this query. + + When false (the default), the query will be + planned, returning only metrics from the + planning stages. 
+ + When true, the query will be planned and + executed, returning the full query results along + with both planning and execution stage metrics. + """ + + analyze: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ExplainMetrics(proto.Message): + r"""Explain metrics for the query. + + Attributes: + plan_summary (google.cloud.firestore_v1.types.PlanSummary): + Planning phase information for the query. + execution_stats (google.cloud.firestore_v1.types.ExecutionStats): + Aggregated stats from the execution of the query. Only + present when + [ExplainOptions.analyze][google.firestore.v1.ExplainOptions.analyze] + is set to true. + """ + + plan_summary: 'PlanSummary' = proto.Field( + proto.MESSAGE, + number=1, + message='PlanSummary', + ) + execution_stats: 'ExecutionStats' = proto.Field( + proto.MESSAGE, + number=2, + message='ExecutionStats', + ) + + +class PlanSummary(proto.Message): + r"""Planning phase information for the query. + + Attributes: + indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): + The indexes selected for the query. For example: [ + {"query_scope": "Collection", "properties": "(foo ASC, + **name** ASC)"}, {"query_scope": "Collection", "properties": + "(bar ASC, **name** ASC)"} ] + """ + + indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class ExecutionStats(proto.Message): + r"""Execution statistics for the query. + + Attributes: + results_returned (int): + Total number of results returned, including + documents, projections, aggregation results, + keys. + execution_duration (google.protobuf.duration_pb2.Duration): + Total time to execute the query in the + backend. + read_operations (int): + Total billable read operations. + debug_stats (google.protobuf.struct_pb2.Struct): + Debugging statistics from the execution of the query. Note + that the debugging stats are subject to change as Firestore + evolves. 
It could include: { "indexes_entries_scanned": + "1000", "documents_scanned": "20", "billing_details" : { + "documents_billable": "20", "index_entries_billable": + "1000", "min_query_cost": "0" } } + """ + + results_returned: int = proto.Field( + proto.INT64, + number=1, + ) + execution_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + read_operations: int = proto.Field( + proto.INT64, + number=4, + ) + debug_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py new file mode 100644 index 0000000000..184c44be10 --- /dev/null +++ b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_v1.types import bloom_filter +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document as gf_document +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.v1', + manifest={ + 'Write', + 'DocumentTransform', + 'WriteResult', + 'DocumentChange', + 'DocumentDelete', + 'DocumentRemove', + 'ExistenceFilter', + }, +) + + +class Write(proto.Message): + r"""A write on a document. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + update (google.cloud.firestore_v1.types.Document): + A document to write. + + This field is a member of `oneof`_ ``operation``. + delete (str): + A document name to delete. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This field is a member of `oneof`_ ``operation``. + transform (google.cloud.firestore_v1.types.DocumentTransform): + Applies a transformation to a document. + + This field is a member of `oneof`_ ``operation``. + update_mask (google.cloud.firestore_v1.types.DocumentMask): + The fields to update in this write. + + This field can be set only when the operation is ``update``. + If the mask is not set for an ``update`` and the document + exists, any existing data will be overwritten. If the mask + is set and the document on the server has fields not covered + by the mask, they are left unchanged. Fields referenced in + the mask, but not present in the input document, are deleted + from the document on the server. 
The field paths in this + mask must not contain a reserved field name. + update_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): + The transforms to perform after update. + + This field can be set only when the operation is ``update``. + If present, this write is equivalent to performing + ``update`` and ``transform`` to the same document atomically + and in order. + current_document (google.cloud.firestore_v1.types.Precondition): + An optional precondition on the document. + + The write will fail if this is set and not met + by the target document. + """ + + update: gf_document.Document = proto.Field( + proto.MESSAGE, + number=1, + oneof='operation', + message=gf_document.Document, + ) + delete: str = proto.Field( + proto.STRING, + number=2, + oneof='operation', + ) + transform: 'DocumentTransform' = proto.Field( + proto.MESSAGE, + number=6, + oneof='operation', + message='DocumentTransform', + ) + update_mask: common.DocumentMask = proto.Field( + proto.MESSAGE, + number=3, + message=common.DocumentMask, + ) + update_transforms: MutableSequence['DocumentTransform.FieldTransform'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='DocumentTransform.FieldTransform', + ) + current_document: common.Precondition = proto.Field( + proto.MESSAGE, + number=4, + message=common.Precondition, + ) + + +class DocumentTransform(proto.Message): + r"""A transformation of a document. + + Attributes: + document (str): + The name of the document to transform. + field_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): + The list of transformations to apply to the + fields of the document, in order. + This must not be empty. + """ + + class FieldTransform(proto.Message): + r"""A transformation of a field of the document. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_path (str): + The path of the field. See + [Document.fields][google.firestore.v1.Document.fields] for + the field path syntax reference. + set_to_server_value (google.cloud.firestore_v1.types.DocumentTransform.FieldTransform.ServerValue): + Sets the field to the given server value. + + This field is a member of `oneof`_ ``transform_type``. + increment (google.cloud.firestore_v1.types.Value): + Adds the given value to the field's current + value. + This must be an integer or a double value. + If the field is not an integer or double, or if + the field does not yet exist, the transformation + will set the field to the given value. If either + of the given value or the current field value + are doubles, both values will be interpreted as + doubles. Double arithmetic and representation of + double values follow IEEE 754 semantics. If + there is positive/negative integer overflow, the + field is resolved to the largest magnitude + positive/negative integer. + + This field is a member of `oneof`_ ``transform_type``. + maximum (google.cloud.firestore_v1.types.Value): + Sets the field to the maximum of its current + value and the given value. + This must be an integer or a double value. + If the field is not an integer or double, or if + the field does not yet exist, the transformation + will set the field to the given value. If a + maximum operation is applied where the field and + the input value are of mixed types (that is - + one is an integer and one is a double) the field + takes on the type of the larger operand. If the + operands are equivalent (e.g. 3 and 3.0), the + field does not change. 0, 0.0, and -0.0 are all + zero. The maximum of a zero stored value and + zero input value is always the stored value. 
+ The maximum of any numeric value x and NaN is + NaN. + + This field is a member of `oneof`_ ``transform_type``. + minimum (google.cloud.firestore_v1.types.Value): + Sets the field to the minimum of its current + value and the given value. + This must be an integer or a double value. + If the field is not an integer or double, or if + the field does not yet exist, the transformation + will set the field to the input value. If a + minimum operation is applied where the field and + the input value are of mixed types (that is - + one is an integer and one is a double) the field + takes on the type of the smaller operand. If the + operands are equivalent (e.g. 3 and 3.0), the + field does not change. 0, 0.0, and -0.0 are all + zero. The minimum of a zero stored value and + zero input value is always the stored value. + The minimum of any numeric value x and NaN is + NaN. + + This field is a member of `oneof`_ ``transform_type``. + append_missing_elements (google.cloud.firestore_v1.types.ArrayValue): + Append the given elements in order if they are not already + present in the current field value. If the field is not an + array, or if the field does not yet exist, it is first set + to the empty array. + + Equivalent numbers of different types (e.g. 3L and 3.0) are + considered equal when checking if a value is missing. NaN is + equal to NaN, and Null is equal to Null. If the input + contains multiple equivalent values, only the first will be + considered. + + The corresponding transform_result will be the null value. + + This field is a member of `oneof`_ ``transform_type``. + remove_all_from_array (google.cloud.firestore_v1.types.ArrayValue): + Remove all of the given elements from the array in the + field. If the field is not an array, or if the field does + not yet exist, it is set to the empty array. + + Equivalent numbers of the different types (e.g. 3L and 3.0) + are considered equal when deciding whether an element should + be removed. 
NaN is equal to NaN, and Null is equal to Null. + This will remove all equivalent values if there are + duplicates. + + The corresponding transform_result will be the null value. + + This field is a member of `oneof`_ ``transform_type``. + """ + class ServerValue(proto.Enum): + r"""A value that is calculated by the server. + + Values: + SERVER_VALUE_UNSPECIFIED (0): + Unspecified. This value must not be used. + REQUEST_TIME (1): + The time at which the server processed the + request, with millisecond precision. If used on + multiple fields (same or different documents) in + a transaction, all the fields will get the same + server timestamp. + """ + SERVER_VALUE_UNSPECIFIED = 0 + REQUEST_TIME = 1 + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + set_to_server_value: 'DocumentTransform.FieldTransform.ServerValue' = proto.Field( + proto.ENUM, + number=2, + oneof='transform_type', + enum='DocumentTransform.FieldTransform.ServerValue', + ) + increment: gf_document.Value = proto.Field( + proto.MESSAGE, + number=3, + oneof='transform_type', + message=gf_document.Value, + ) + maximum: gf_document.Value = proto.Field( + proto.MESSAGE, + number=4, + oneof='transform_type', + message=gf_document.Value, + ) + minimum: gf_document.Value = proto.Field( + proto.MESSAGE, + number=5, + oneof='transform_type', + message=gf_document.Value, + ) + append_missing_elements: gf_document.ArrayValue = proto.Field( + proto.MESSAGE, + number=6, + oneof='transform_type', + message=gf_document.ArrayValue, + ) + remove_all_from_array: gf_document.ArrayValue = proto.Field( + proto.MESSAGE, + number=7, + oneof='transform_type', + message=gf_document.ArrayValue, + ) + + document: str = proto.Field( + proto.STRING, + number=1, + ) + field_transforms: MutableSequence[FieldTransform] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=FieldTransform, + ) + + +class WriteResult(proto.Message): + r"""The result of applying a write. 
+ + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update time of the document after applying the + write. Not set after a ``delete``. + + If the write did not actually change the document, this will + be the previous update_time. + transform_results (MutableSequence[google.cloud.firestore_v1.types.Value]): + The results of applying each + [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], + in the same order. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + transform_results: MutableSequence[gf_document.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gf_document.Value, + ) + + +class DocumentChange(proto.Message): + r"""A [Document][google.firestore.v1.Document] has changed. + + May be the result of multiple [writes][google.firestore.v1.Write], + including deletes, that ultimately resulted in a new value for the + [Document][google.firestore.v1.Document]. + + Multiple [DocumentChange][google.firestore.v1.DocumentChange] + messages may be returned for the same logical change, if multiple + targets are affected. + + Attributes: + document (google.cloud.firestore_v1.types.Document): + The new state of the + [Document][google.firestore.v1.Document]. + + If ``mask`` is set, contains only fields that were updated + or added. + target_ids (MutableSequence[int]): + A set of target IDs of targets that match + this document. + removed_target_ids (MutableSequence[int]): + A set of target IDs for targets that no + longer match this document. 
+ """ + + document: gf_document.Document = proto.Field( + proto.MESSAGE, + number=1, + message=gf_document.Document, + ) + target_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=5, + ) + removed_target_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=6, + ) + + +class DocumentDelete(proto.Message): + r"""A [Document][google.firestore.v1.Document] has been deleted. + + May be the result of multiple [writes][google.firestore.v1.Write], + including updates, the last of which deleted the + [Document][google.firestore.v1.Document]. + + Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] + messages may be returned for the same logical delete, if multiple + targets are affected. + + Attributes: + document (str): + The resource name of the + [Document][google.firestore.v1.Document] that was deleted. + removed_target_ids (MutableSequence[int]): + A set of target IDs for targets that + previously matched this entity. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The read timestamp at which the delete was observed. + + Greater or equal to the ``commit_time`` of the delete. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + removed_target_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=6, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class DocumentRemove(proto.Message): + r"""A [Document][google.firestore.v1.Document] has been removed from the + view of the targets. + + Sent if the document is no longer relevant to a target and is out of + view. Can be sent instead of a DocumentDelete or a DocumentChange if + the server can not send the new value of the document. + + Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] + messages may be returned for the same logical write or delete, if + multiple targets are affected. 
+ + Attributes: + document (str): + The resource name of the + [Document][google.firestore.v1.Document] that has gone out + of view. + removed_target_ids (MutableSequence[int]): + A set of target IDs for targets that + previously matched this document. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The read timestamp at which the remove was observed. + + Greater or equal to the ``commit_time`` of the + change/delete/remove. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + removed_target_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class ExistenceFilter(proto.Message): + r"""A digest of all the documents that match a given target. + + Attributes: + target_id (int): + The target ID to which this filter applies. + count (int): + The total count of documents that match + [target_id][google.firestore.v1.ExistenceFilter.target_id]. + + If different from the count of documents in the client that + match, the client must manually determine which documents no + longer match the target. + + The client can use the ``unchanged_names`` bloom filter to + assist with this determination by testing ALL the document + names against the filter; if the document name is NOT in the + filter, it means the document no longer matches the target. + unchanged_names (google.cloud.firestore_v1.types.BloomFilter): + A bloom filter that, despite its name, contains the UTF-8 + byte encodings of the resource names of ALL the documents + that match + [target_id][google.firestore.v1.ExistenceFilter.target_id], + in the form + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + + This bloom filter may be omitted at the server's discretion, + such as if it is deemed that the client will not make use of + it or if it is too computationally expensive to calculate or + transmit. 
Clients must gracefully handle this field being + absent by falling back to the logic used before this field + existed; that is, re-add the target without a resume token + to figure out which documents in the client's cache are out + of sync. + """ + + target_id: int = proto.Field( + proto.INT32, + number=1, + ) + count: int = proto.Field( + proto.INT32, + number=2, + ) + unchanged_names: bloom_filter.BloomFilter = proto.Field( + proto.MESSAGE, + number=3, + message=bloom_filter.BloomFilter, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/mypy.ini b/owl-bot-staging/firestore/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/firestore/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/firestore/v1/noxfile.py b/owl-bot-staging/firestore/v1/noxfile.py new file mode 100644 index 0000000000..0633d4d81f --- /dev/null +++ b/owl-bot-staging/firestore/v1/noxfile.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12" +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-firestore' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.12" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/firestore_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/firestore_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py new file mode 100644 index 0000000000..ca00b0567b --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BatchGetDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_batch_get_documents(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BatchGetDocumentsRequest( + transaction=b'transaction_blob', + database="database_value", + ) + + # Make the request + stream = await client.batch_get_documents(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_BatchGetDocuments_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py new file mode 100644 index 0000000000..12ad8bda43 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BatchGetDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_batch_get_documents(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BatchGetDocumentsRequest( + transaction=b'transaction_blob', + database="database_value", + ) + + # Make the request + stream = client.batch_get_documents(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_BatchGetDocuments_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py new file mode 100644 index 0000000000..092f2ec2a2 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchWrite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BatchWrite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_batch_write(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BatchWriteRequest( + database="database_value", + ) + + # Make the request + response = await client.batch_write(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_BatchWrite_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py new file mode 100644 index 0000000000..d943de176e --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchWrite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BatchWrite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_batch_write(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BatchWriteRequest( + database="database_value", + ) + + # Make the request + response = client.batch_write(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_BatchWrite_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py new file mode 100644 index 0000000000..8f9a2d8378 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BeginTransaction +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BeginTransaction_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_begin_transaction(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.BeginTransactionRequest( + database="database_value", + ) + + # Make the request + response = await client.begin_transaction(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_BeginTransaction_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py new file mode 100644 index 0000000000..f036f63c25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BeginTransaction +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_BeginTransaction_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_begin_transaction(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.BeginTransactionRequest( + database="database_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_BeginTransaction_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py new file mode 100644 index 0000000000..7403705ee4 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Commit +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Commit_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_commit(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.CommitRequest( + database="database_value", + ) + + # Make the request + response = await client.commit(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_Commit_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py new file mode 100644 index 0000000000..059cf23bca --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Commit +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Commit_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_commit(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.CommitRequest( + database="database_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_Commit_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py new file mode 100644 index 0000000000..6ff6596d30 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_create_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.CreateDocumentRequest( + parent="parent_value", + collection_id="collection_id_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_CreateDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py new file mode 100644 index 0000000000..d26f82921b --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_CreateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_create_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.CreateDocumentRequest( + parent="parent_value", + collection_id="collection_id_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_CreateDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py new file mode 100644 index 0000000000..f7e822edb6 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_delete_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + +# [END firestore_v1_generated_Firestore_DeleteDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py new file mode 100644 index 0000000000..b51f132adc --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_DeleteDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_delete_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + +# [END firestore_v1_generated_Firestore_DeleteDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py new file mode 100644 index 0000000000..8436177089 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_get_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.GetDocumentRequest( + transaction=b'transaction_blob', + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_GetDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py new file mode 100644 index 0000000000..d275afa9e0 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_get_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.GetDocumentRequest( + transaction=b'transaction_blob', + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_GetDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py new file mode 100644 index 0000000000..a5bb34a6c5 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCollectionIds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_ListCollectionIds_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_list_collection_ids(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.ListCollectionIdsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collection_ids(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_ListCollectionIds_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py new file mode 100644 index 0000000000..6afc4ecf08 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCollectionIds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_ListCollectionIds_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_list_collection_ids(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.ListCollectionIdsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collection_ids(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_ListCollectionIds_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py new file mode 100644 index 0000000000..d6d73863ae --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_list_documents(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.ListDocumentsRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_ListDocuments_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py new file mode 100644 index 0000000000..36374e65ac --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_ListDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_list_documents(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.ListDocumentsRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_ListDocuments_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py new file mode 100644 index 0000000000..8a2aeca34a --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Listen +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Listen_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_listen(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + add_target = firestore_v1.Target() + add_target.resume_token = b'resume_token_blob' + + request = firestore_v1.ListenRequest( + add_target=add_target, + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.ListenRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.listen(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_Listen_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py new file mode 100644 index 0000000000..ad95096b8e --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Listen +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Listen_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_listen(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + add_target = firestore_v1.Target() + add_target.resume_token = b'resume_token_blob' + + request = firestore_v1.ListenRequest( + add_target=add_target, + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.ListenRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.listen(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_Listen_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py new file mode 100644 index 0000000000..252bff079b --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_PartitionQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_partition_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.PartitionQueryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.partition_query(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_PartitionQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py new file mode 100644 index 0000000000..790057a9f6 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_PartitionQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_partition_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.PartitionQueryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.partition_query(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END firestore_v1_generated_Firestore_PartitionQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py new file mode 100644 index 0000000000..b44f38feba --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Rollback_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_rollback(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RollbackRequest( + database="database_value", + transaction=b'transaction_blob', + ) + + # Make the request + await client.rollback(request=request) + + +# [END firestore_v1_generated_Firestore_Rollback_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py new file mode 100644 index 0000000000..8e11439857 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Rollback_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_rollback(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RollbackRequest( + database="database_value", + transaction=b'transaction_blob', + ) + + # Make the request + client.rollback(request=request) + + +# [END firestore_v1_generated_Firestore_Rollback_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py new file mode 100644 index 0000000000..2c82a287ef --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunAggregationQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_RunAggregationQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_run_aggregation_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RunAggregationQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = await client.run_aggregation_query(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_RunAggregationQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py new file mode 100644 index 0000000000..1f814a5f48 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunAggregationQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_RunAggregationQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_run_aggregation_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RunAggregationQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = client.run_aggregation_query(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_RunAggregationQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py new file mode 100644 index 0000000000..ea4d24ad69 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_RunQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_run_query(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.RunQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = await client.run_query(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_RunQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py new file mode 100644 index 0000000000..bf51ccc402 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_RunQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_run_query(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.RunQueryRequest( + transaction=b'transaction_blob', + parent="parent_value", + ) + + # Make the request + stream = client.run_query(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_RunQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py new file mode 100644 index 0000000000..f5ed989a53 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_update_document(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_UpdateDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py new file mode 100644 index 0000000000..bd8cdc296a --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_UpdateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_update_document(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_Firestore_UpdateDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py new file mode 100644 index 0000000000..561098e322 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Write +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Write_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +async def sample_write(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + request = firestore_v1.WriteRequest( + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.WriteRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.write(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_Write_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py new file mode 100644 index 0000000000..1980d0935b --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Write +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore + + +# [START firestore_v1_generated_Firestore_Write_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_v1 + + +def sample_write(): + # Create a client + client = firestore_v1.FirestoreClient() + + # Initialize request argument(s) + request = firestore_v1.WriteRequest( + database="database_value", + ) + + # This method expects an iterator which contains + # 'firestore_v1.WriteRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.write(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END firestore_v1_generated_Firestore_Write_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json b/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json new file mode 100644 index 0000000000..90a3e4b880 --- /dev/null +++ b/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json @@ -0,0 +1,2523 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.firestore.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-firestore", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.batch_get_documents", + "method": { + "fullName": "google.firestore.v1.Firestore.BatchGetDocuments", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BatchGetDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BatchGetDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]", + "shortName": "batch_get_documents" + }, + "description": "Sample for BatchGetDocuments", + "file": "firestore_v1_generated_firestore_batch_get_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"firestore_v1_generated_Firestore_BatchGetDocuments_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_batch_get_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.batch_get_documents", + "method": { + "fullName": "google.firestore.v1.Firestore.BatchGetDocuments", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BatchGetDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BatchGetDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]", + "shortName": "batch_get_documents" + }, + "description": "Sample for BatchGetDocuments", + "file": "firestore_v1_generated_firestore_batch_get_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_BatchGetDocuments_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_batch_get_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.batch_write", + "method": { + "fullName": "google.firestore.v1.Firestore.BatchWrite", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BatchWriteRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.BatchWriteResponse", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "firestore_v1_generated_firestore_batch_write_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_BatchWrite_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_batch_write_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.batch_write", + "method": { + "fullName": 
"google.firestore.v1.Firestore.BatchWrite", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BatchWriteRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.BatchWriteResponse", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "firestore_v1_generated_firestore_batch_write_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_BatchWrite_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_batch_write_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.begin_transaction", + "method": { + "fullName": "google.firestore.v1.Firestore.BeginTransaction", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BeginTransactionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.BeginTransactionResponse", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "firestore_v1_generated_firestore_begin_transaction_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_BeginTransaction_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_begin_transaction_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.begin_transaction", + "method": { + "fullName": "google.firestore.v1.Firestore.BeginTransaction", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.BeginTransactionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.BeginTransactionResponse", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "firestore_v1_generated_firestore_begin_transaction_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_BeginTransaction_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_begin_transaction_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.commit", + "method": { + "fullName": "google.firestore.v1.Firestore.Commit", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.CommitRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "writes", + "type": "MutableSequence[google.cloud.firestore_v1.types.Write]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "firestore_v1_generated_firestore_commit_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Commit_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_commit_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.commit", + "method": { + "fullName": "google.firestore.v1.Firestore.Commit", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.CommitRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "writes", + "type": "MutableSequence[google.cloud.firestore_v1.types.Write]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "firestore_v1_generated_firestore_commit_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Commit_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_commit_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.create_document", + "method": { + "fullName": "google.firestore.v1.Firestore.CreateDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.CreateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "firestore_v1_generated_firestore_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_CreateDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.create_document", + "method": { + "fullName": "google.firestore.v1.Firestore.CreateDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.firestore_v1.types.CreateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "firestore_v1_generated_firestore_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_CreateDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.delete_document", + "method": { + "fullName": "google.firestore.v1.Firestore.DeleteDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": 
"firestore_v1_generated_firestore_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.delete_document", + "method": { + "fullName": "google.firestore.v1.Firestore.DeleteDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "firestore_v1_generated_firestore_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.get_document", + "method": { + "fullName": "google.firestore.v1.Firestore.GetDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.GetDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "firestore_v1_generated_firestore_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_GetDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.get_document", + "method": { + "fullName": 
"google.firestore.v1.Firestore.GetDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.GetDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "firestore_v1_generated_firestore_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_GetDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.list_collection_ids", + "method": { + "fullName": "google.firestore.v1.Firestore.ListCollectionIds", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "ListCollectionIds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.ListCollectionIdsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager", + "shortName": "list_collection_ids" + }, + "description": "Sample for ListCollectionIds", + "file": "firestore_v1_generated_firestore_list_collection_ids_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_ListCollectionIds_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_list_collection_ids_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.list_collection_ids", + "method": { + "fullName": "google.firestore.v1.Firestore.ListCollectionIds", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "ListCollectionIds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.ListCollectionIdsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager", + "shortName": "list_collection_ids" + }, + "description": "Sample for ListCollectionIds", + "file": 
"firestore_v1_generated_firestore_list_collection_ids_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_ListCollectionIds_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_list_collection_ids_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.list_documents", + "method": { + "fullName": "google.firestore.v1.Firestore.ListDocuments", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.ListDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "firestore_v1_generated_firestore_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_ListDocuments_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.list_documents", + "method": { + "fullName": "google.firestore.v1.Firestore.ListDocuments", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.ListDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "firestore_v1_generated_firestore_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_ListDocuments_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.listen", + "method": { + "fullName": "google.firestore.v1.Firestore.Listen", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Listen" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.firestore_v1.types.ListenRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.ListenResponse]", + "shortName": "listen" + }, + "description": "Sample for Listen", + "file": "firestore_v1_generated_firestore_listen_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Listen_async", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_listen_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.listen", + "method": { + "fullName": "google.firestore.v1.Firestore.Listen", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Listen" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.firestore_v1.types.ListenRequest]" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.ListenResponse]", + "shortName": "listen" + }, + "description": "Sample for Listen", + "file": "firestore_v1_generated_firestore_listen_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Listen_sync", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_listen_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.partition_query", + "method": { + "fullName": "google.firestore.v1.Firestore.PartitionQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "firestore_v1_generated_firestore_partition_query_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_PartitionQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_partition_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.partition_query", + "method": { + "fullName": "google.firestore.v1.Firestore.PartitionQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "firestore_v1_generated_firestore_partition_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_PartitionQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_partition_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.rollback", + "method": { + "fullName": "google.firestore.v1.Firestore.Rollback", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RollbackRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "transaction", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "firestore_v1_generated_firestore_rollback_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Rollback_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_rollback_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.rollback", + "method": { + "fullName": 
"google.firestore.v1.Firestore.Rollback", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RollbackRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "transaction", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "firestore_v1_generated_firestore_rollback_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Rollback_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_rollback_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.run_aggregation_query", + "method": { + "fullName": "google.firestore.v1.Firestore.RunAggregationQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "RunAggregationQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RunAggregationQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]", + "shortName": "run_aggregation_query" + }, + "description": "Sample for RunAggregationQuery", + "file": "firestore_v1_generated_firestore_run_aggregation_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_RunAggregationQuery_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_run_aggregation_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.run_aggregation_query", + "method": { + "fullName": "google.firestore.v1.Firestore.RunAggregationQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "RunAggregationQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RunAggregationQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]", + "shortName": "run_aggregation_query" + }, + "description": "Sample for RunAggregationQuery", + "file": "firestore_v1_generated_firestore_run_aggregation_query_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_RunAggregationQuery_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_run_aggregation_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.run_query", + "method": { + "fullName": "google.firestore.v1.Firestore.RunQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "RunQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RunQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.RunQueryResponse]", + "shortName": "run_query" + }, + "description": "Sample for RunQuery", + "file": "firestore_v1_generated_firestore_run_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_RunQuery_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, 
+ { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_run_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.run_query", + "method": { + "fullName": "google.firestore.v1.Firestore.RunQuery", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "RunQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.RunQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.RunQueryResponse]", + "shortName": "run_query" + }, + "description": "Sample for RunQuery", + "file": "firestore_v1_generated_firestore_run_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_RunQuery_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_run_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.update_document", + "method": { + "fullName": 
"google.firestore.v1.Firestore.UpdateDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_v1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.firestore_v1.types.Document" + }, + { + "name": "update_mask", + "type": "google.cloud.firestore_v1.types.DocumentMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "firestore_v1_generated_firestore_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.update_document", + "method": { + "fullName": "google.firestore.v1.Firestore.UpdateDocument", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.firestore_v1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.firestore_v1.types.Document" + }, + { + "name": "update_mask", + "type": "google.cloud.firestore_v1.types.DocumentMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "firestore_v1_generated_firestore_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", + "shortName": "FirestoreAsyncClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.write", + "method": { + "fullName": "google.firestore.v1.Firestore.Write", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Write" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.firestore_v1.types.WriteRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "Iterable[google.cloud.firestore_v1.types.WriteResponse]", + "shortName": "write" + }, + "description": "Sample for Write", + "file": "firestore_v1_generated_firestore_write_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Write_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_write_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_v1.FirestoreClient", + "shortName": "FirestoreClient" + }, + "fullName": "google.cloud.firestore_v1.FirestoreClient.write", + "method": { + "fullName": "google.firestore.v1.Firestore.Write", + "service": { + "fullName": "google.firestore.v1.Firestore", + "shortName": "Firestore" + }, + "shortName": "Write" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.firestore_v1.types.WriteRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.firestore_v1.types.WriteResponse]", + "shortName": "write" + }, + "description": "Sample for Write", + "file": "firestore_v1_generated_firestore_write_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_Firestore_Write_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_write_sync.py" + } + ] +} diff --git a/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py b/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py new file mode 100644 index 0000000000..1b23d34f11 --- /dev/null +++ b/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class firestoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), + 'batch_write': ('database', 'writes', 'labels', ), + 'begin_transaction': ('database', 'options', ), + 'commit': ('database', 'writes', 'transaction', ), + 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), + 'delete_document': ('name', 'current_document', ), + 'get_document': ('name', 'mask', 'transaction', 'read_time', ), + 'list_collection_ids': ('parent', 'page_size', 'page_token', 'read_time', ), + 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), + 'listen': ('database', 'add_target', 'remove_target', 'labels', ), + 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', 'read_time', ), + 'rollback': ('database', 'transaction', ), + 'run_aggregation_query': ('parent', 'structured_aggregation_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), + 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), + 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), + 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), + } + + def leave_Call(self, original: cst.Call, 
updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=firestoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the firestore client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/firestore/v1/setup.py b/owl-bot-staging/firestore/v1/setup.py new file mode 100644 index 0000000000..074598b595 --- /dev/null +++ b/owl-bot-staging/firestore/v1/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-firestore' + + +description = "Google Cloud Firestore API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/firestore/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-firestore" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 
3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..b8a550c738 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore/v1/tests/__init__.py b/owl-bot-staging/firestore/v1/tests/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore/v1/tests/unit/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py new file mode 100644 index 0000000000..2ac61081c7 --- /dev/null +++ b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py @@ -0,0 +1,9455 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from 
google.auth.exceptions import MutualTLSChannelError +from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient +from google.cloud.firestore_v1.services.firestore import FirestoreClient +from google.cloud.firestore_v1.services.firestore import pagers +from google.cloud.firestore_v1.services.firestore import transports +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write as gf_write +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FirestoreClient._get_default_mtls_endpoint(None) is None + assert FirestoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert FirestoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert FirestoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert FirestoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert FirestoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FirestoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FirestoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + FirestoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FirestoreClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + 
assert FirestoreClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FirestoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FirestoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FirestoreClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert FirestoreClient._get_client_cert_source(None, False) is None + assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert FirestoreClient._get_client_cert_source(None, True) is mock_default_cert_source + assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) +@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = FirestoreClient._DEFAULT_UNIVERSE + default_endpoint = 
FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert FirestoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert FirestoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == FirestoreClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert FirestoreClient._get_api_endpoint(None, None, default_universe, "always") == FirestoreClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == FirestoreClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert FirestoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + FirestoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert FirestoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert FirestoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert FirestoreClient._get_universe_domain(None, None) == FirestoreClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + FirestoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), + (FirestoreClient, transports.FirestoreRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class( + transport=transport_class(credentials=credentials) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirestoreClient, "grpc"), + (FirestoreAsyncClient, "grpc_asyncio"), + (FirestoreClient, "rest"), +]) +def test_firestore_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://firestore.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.FirestoreGrpcTransport, "grpc"), + (transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FirestoreRestTransport, "rest"), +]) +def test_firestore_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirestoreClient, "grpc"), + (FirestoreAsyncClient, "grpc_asyncio"), + 
(FirestoreClient, "rest"), +]) +def test_firestore_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://firestore.googleapis.com' + ) + + +def test_firestore_client_get_transport_class(): + transport = FirestoreClient.get_transport_class() + available_transports = [ + transports.FirestoreGrpcTransport, + transports.FirestoreRestTransport, + ] + assert transport in available_transports + + transport = FirestoreClient.get_transport_class("grpc") + assert transport == transports.FirestoreGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), + (FirestoreClient, transports.FirestoreRestTransport, "rest"), +]) +@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) +@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) +def test_firestore_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(FirestoreClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FirestoreClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "true"), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "false"), + (FirestoreAsyncClient, 
transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (FirestoreClient, transports.FirestoreRestTransport, "rest", "true"), + (FirestoreClient, transports.FirestoreRestTransport, "rest", "false"), +]) +@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) +@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_firestore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + FirestoreClient, FirestoreAsyncClient +]) +@mock.patch.object(FirestoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreClient)) +@mock.patch.object(FirestoreAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAsyncClient)) +def test_firestore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + FirestoreClient, FirestoreAsyncClient +]) +@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) +@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) +def test_firestore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FirestoreClient._DEFAULT_UNIVERSE + default_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = 
FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), + (FirestoreClient, transports.FirestoreRestTransport, "rest"), +]) +def test_firestore_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (FirestoreClient, transports.FirestoreRestTransport, "rest", None), +]) +def test_firestore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_firestore_client_client_options_from_dict(): + with mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = FirestoreClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_firestore_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + scopes=None, + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.GetDocumentRequest, + dict, +]) +def test_get_document(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is 
concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document( + name='name_value', + ) + response = client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.GetDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +def test_get_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + client.get_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.GetDocumentRequest() + + +def test_get_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = firestore.GetDocumentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + client.get_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.GetDocumentRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_document_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document( + name='name_value', + )) + response = await client.get_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.GetDocumentRequest() + +@pytest.mark.asyncio +async def test_get_document_async(transport: str = 'grpc_asyncio', request_type=firestore.GetDocumentRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document.Document( + name='name_value', + )) + response = await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.GetDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_document_async_from_dict(): + await test_get_document_async(request_type=dict) + + +def test_get_document_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.GetDocumentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + call.return_value = document.Document() + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_document_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.GetDocumentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_document), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) + await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.ListDocumentsRequest, + dict, +]) +def test_list_documents(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.ListDocumentsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.ListDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + client.list_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListDocumentsRequest() + + +def test_list_documents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.ListDocumentsRequest( + parent='parent_value', + collection_id='collection_id_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + client.list_documents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListDocumentsRequest( + parent='parent_value', + collection_id='collection_id_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +@pytest.mark.asyncio +async def test_list_documents_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListDocumentsRequest() + +@pytest.mark.asyncio +async def test_list_documents_async(transport: str = 'grpc_asyncio', request_type=firestore.ListDocumentsRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.ListDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDocumentsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_documents_async_from_dict(): + await test_list_documents_async(request_type=dict) + + +def test_list_documents_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.ListDocumentsRequest() + + request.parent = 'parent_value' + request.collection_id = 'collection_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + call.return_value = firestore.ListDocumentsResponse() + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value&collection_id=collection_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_documents_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.ListDocumentsRequest() + + request.parent = 'parent_value' + request.collection_id = 'collection_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse()) + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value&collection_id=collection_id_value', + ) in kw['metadata'] + + +def test_list_documents_pager(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token='abc', + ), + firestore.ListDocumentsResponse( + documents=[], + next_page_token='def', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token='ghi', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + ('collection_id', ''), + )), + ) + pager = client.list_documents(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) + for i in results) +def test_list_documents_pages(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual 
call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token='abc', + ), + firestore.ListDocumentsResponse( + documents=[], + next_page_token='def', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token='ghi', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + RuntimeError, + ) + pages = list(client.list_documents(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_documents_async_pager(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token='abc', + ), + firestore.ListDocumentsResponse( + documents=[], + next_page_token='def', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token='ghi', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_documents(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, document.Document) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_documents_async_pages(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token='abc', + ), + firestore.ListDocumentsResponse( + documents=[], + next_page_token='def', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token='ghi', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_documents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + firestore.UpdateDocumentRequest, + dict, +]) +def test_update_document(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gf_document.Document( + name='name_value', + ) + response = client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.UpdateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gf_document.Document) + assert response.name == 'name_value' + + +def test_update_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + client.update_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.UpdateDocumentRequest() + + +def test_update_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.UpdateDocumentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + client.update_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.UpdateDocumentRequest( + ) + +@pytest.mark.asyncio +async def test_update_document_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document( + name='name_value', + )) + response = await client.update_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.UpdateDocumentRequest() + +@pytest.mark.asyncio +async def test_update_document_async(transport: str = 'grpc_asyncio', request_type=firestore.UpdateDocumentRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document( + name='name_value', + )) + response = await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.UpdateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gf_document.Document) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_document_async_from_dict(): + await test_update_document_async(request_type=dict) + + +def test_update_document_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.UpdateDocumentRequest() + + request.document.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + call.return_value = gf_document.Document() + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'document.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_document_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.UpdateDocumentRequest() + + request.document.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document()) + await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'document.name=name_value', + ) in kw['metadata'] + + +def test_update_document_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gf_document.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_document( + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = gf_document.Document(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = common.DocumentMask(field_paths=['field_paths_value']) + assert arg == mock_val + + +def test_update_document_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document( + firestore.UpdateDocumentRequest(), + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_document_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gf_document.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_document( + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = gf_document.Document(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = common.DocumentMask(field_paths=['field_paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_document_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_document( + firestore.UpdateDocumentRequest(), + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.DeleteDocumentRequest, + dict, +]) +def test_delete_document(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.DeleteDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + client.delete_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.DeleteDocumentRequest() + + +def test_delete_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.DeleteDocumentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + client.delete_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.DeleteDocumentRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_document_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.DeleteDocumentRequest() + +@pytest.mark.asyncio +async def test_delete_document_async(transport: str = 'grpc_asyncio', request_type=firestore.DeleteDocumentRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.DeleteDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_document_async_from_dict(): + await test_delete_document_async(request_type=dict) + + +def test_delete_document_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.DeleteDocumentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + call.return_value = None + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_document_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.DeleteDocumentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_document_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_document( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_document_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + firestore.DeleteDocumentRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_document_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_document( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_document_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_document( + firestore.DeleteDocumentRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.BatchGetDocumentsRequest, + dict, +]) +def test_batch_get_documents(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.BatchGetDocumentsResponse()]) + response = client.batch_get_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.BatchGetDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, firestore.BatchGetDocumentsResponse) + + +def test_batch_get_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + client.batch_get_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchGetDocumentsRequest() + + +def test_batch_get_documents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.BatchGetDocumentsRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + client.batch_get_documents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchGetDocumentsRequest( + database='database_value', + ) + +@pytest.mark.asyncio +async def test_batch_get_documents_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) + response = await client.batch_get_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchGetDocumentsRequest() + +@pytest.mark.asyncio +async def test_batch_get_documents_async(transport: str = 'grpc_asyncio', request_type=firestore.BatchGetDocumentsRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) + response = await client.batch_get_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.BatchGetDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, firestore.BatchGetDocumentsResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_async_from_dict(): + await test_batch_get_documents_async(request_type=dict) + + +def test_batch_get_documents_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.BatchGetDocumentsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + call.return_value = iter([firestore.BatchGetDocumentsResponse()]) + client.batch_get_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_get_documents_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.BatchGetDocumentsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) + await client.batch_get_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.BeginTransactionRequest, + dict, +]) +def test_begin_transaction(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.BeginTransactionResponse( + transaction=b'transaction_blob', + ) + response = client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.BeginTransactionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BeginTransactionResponse) + assert response.transaction == b'transaction_blob' + + +def test_begin_transaction_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BeginTransactionRequest() + + +def test_begin_transaction_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.BeginTransactionRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + client.begin_transaction(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BeginTransactionRequest( + database='database_value', + ) + +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse( + transaction=b'transaction_blob', + )) + response = await client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BeginTransactionRequest() + +@pytest.mark.asyncio +async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=firestore.BeginTransactionRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse( + transaction=b'transaction_blob', + )) + response = await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.BeginTransactionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BeginTransactionResponse) + assert response.transaction == b'transaction_blob' + + +@pytest.mark.asyncio +async def test_begin_transaction_async_from_dict(): + await test_begin_transaction_async(request_type=dict) + + +def test_begin_transaction_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.BeginTransactionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = firestore.BeginTransactionResponse() + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_begin_transaction_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.BeginTransactionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse()) + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_begin_transaction_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.BeginTransactionResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.begin_transaction( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + + +def test_begin_transaction_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + firestore.BeginTransactionRequest(), + database='database_value', + ) + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.BeginTransactionResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.begin_transaction( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.begin_transaction( + firestore.BeginTransactionRequest(), + database='database_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.CommitRequest, + dict, +]) +def test_commit(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.CommitResponse( + ) + response = client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.CommitRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.CommitResponse) + + +def test_commit_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CommitRequest() + + +def test_commit_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.CommitRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + client.commit(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CommitRequest( + database='database_value', + ) + +@pytest.mark.asyncio +async def test_commit_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse( + )) + response = await client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CommitRequest() + +@pytest.mark.asyncio +async def test_commit_async(transport: str = 'grpc_asyncio', request_type=firestore.CommitRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse( + )) + response = await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.CommitRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.CommitResponse) + + +@pytest.mark.asyncio +async def test_commit_async_from_dict(): + await test_commit_async(request_type=dict) + + +def test_commit_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.CommitRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = firestore.CommitResponse() + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.CommitRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse()) + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_commit_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.CommitResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.commit( + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].writes + mock_val = [gf_write.Write(update=document.Document(name='name_value'))] + assert arg == mock_val + + +def test_commit_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + firestore.CommitRequest(), + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + +@pytest.mark.asyncio +async def test_commit_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.CommitResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.commit( + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].writes + mock_val = [gf_write.Write(update=document.Document(name='name_value'))] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_commit_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.commit( + firestore.CommitRequest(), + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.RollbackRequest, + dict, +]) +def test_rollback(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.RollbackRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_rollback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RollbackRequest() + + +def test_rollback_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.RollbackRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + client.rollback(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RollbackRequest( + database='database_value', + ) + +@pytest.mark.asyncio +async def test_rollback_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RollbackRequest() + +@pytest.mark.asyncio +async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=firestore.RollbackRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.RollbackRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_rollback_async_from_dict(): + await test_rollback_async(request_type=dict) + + +def test_rollback_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.RollbackRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.RollbackRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_rollback_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback( + database='database_value', + transaction=b'transaction_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].transaction + mock_val = b'transaction_blob' + assert arg == mock_val + + +def test_rollback_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + firestore.RollbackRequest(), + database='database_value', + transaction=b'transaction_blob', + ) + +@pytest.mark.asyncio +async def test_rollback_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rollback( + database='database_value', + transaction=b'transaction_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].transaction + mock_val = b'transaction_blob' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_rollback_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.rollback( + firestore.RollbackRequest(), + database='database_value', + transaction=b'transaction_blob', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.RunQueryRequest, + dict, +]) +def test_run_query(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.RunQueryResponse()]) + response = client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.RunQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, firestore.RunQueryResponse) + + +def test_run_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + client.run_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunQueryRequest() + + +def test_run_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.RunQueryRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + client.run_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunQueryRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_run_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) + response = await client.run_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunQueryRequest() + +@pytest.mark.asyncio +async def test_run_query_async(transport: str = 'grpc_asyncio', request_type=firestore.RunQueryRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) + response = await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.RunQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, firestore.RunQueryResponse) + + +@pytest.mark.asyncio +async def test_run_query_async_from_dict(): + await test_run_query_async(request_type=dict) + + +def test_run_query_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.RunQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + call.return_value = iter([firestore.RunQueryResponse()]) + client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_query_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.RunQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_query), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) + await client.run_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.RunAggregationQueryRequest, + dict, +]) +def test_run_aggregation_query(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.RunAggregationQueryResponse()]) + response = client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.RunAggregationQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, firestore.RunAggregationQueryResponse) + + +def test_run_aggregation_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + client.run_aggregation_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunAggregationQueryRequest() + + +def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = firestore.RunAggregationQueryRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + client.run_aggregation_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunAggregationQueryRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_run_aggregation_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) + response = await client.run_aggregation_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.RunAggregationQueryRequest() + +@pytest.mark.asyncio +async def test_run_aggregation_query_async(transport: str = 'grpc_asyncio', request_type=firestore.RunAggregationQueryRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) + response = await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.RunAggregationQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, firestore.RunAggregationQueryResponse) + + +@pytest.mark.asyncio +async def test_run_aggregation_query_async_from_dict(): + await test_run_aggregation_query_async(request_type=dict) + + +def test_run_aggregation_query_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.RunAggregationQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + call.return_value = iter([firestore.RunAggregationQueryResponse()]) + client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_aggregation_query_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.RunAggregationQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_aggregation_query), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) + await client.run_aggregation_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.PartitionQueryRequest, + dict, +]) +def test_partition_query(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.PartitionQueryResponse( + next_page_token='next_page_token_value', + ) + response = client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.PartitionQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.PartitionQueryPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_partition_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + client.partition_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.PartitionQueryRequest() + + +def test_partition_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.PartitionQueryRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + client.partition_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.PartitionQueryRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_partition_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse( + next_page_token='next_page_token_value', + )) + response = await client.partition_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.PartitionQueryRequest() + +@pytest.mark.asyncio +async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=firestore.PartitionQueryRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse( + next_page_token='next_page_token_value', + )) + response = await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.PartitionQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.PartitionQueryAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_partition_query_async_from_dict(): + await test_partition_query_async(request_type=dict) + + +def test_partition_query_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.PartitionQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = firestore.PartitionQueryResponse() + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_partition_query_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.PartitionQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse()) + await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_partition_query_pager(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token='abc', + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token='def', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token='ghi', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.partition_query(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, query.Cursor) + for i in results) +def test_partition_query_pages(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token='abc', + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token='def', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token='ghi', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + RuntimeError, + ) + pages = list(client.partition_query(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_partition_query_async_pager(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token='abc', + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token='def', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token='ghi', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + RuntimeError, + ) + async_pager = await client.partition_query(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, query.Cursor) + for i in responses) + + +@pytest.mark.asyncio +async def test_partition_query_async_pages(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token='abc', + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token='def', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token='ghi', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.partition_query(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + firestore.WriteRequest, + dict, +]) +def test_write(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.WriteResponse()]) + response = client.write(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ for message in response: + assert isinstance(message, firestore.WriteResponse) + + +@pytest.mark.asyncio +async def test_write_async(transport: str = 'grpc_asyncio', request_type=firestore.WriteRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()]) + response = await client.write(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, firestore.WriteResponse) + + +@pytest.mark.asyncio +async def test_write_async_from_dict(): + await test_write_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + firestore.ListenRequest, + dict, +]) +def test_listen(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.listen), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.ListenResponse()]) + response = client.listen(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, firestore.ListenResponse) + + +@pytest.mark.asyncio +async def test_listen_async(transport: str = 'grpc_asyncio', request_type=firestore.ListenRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.listen), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[firestore.ListenResponse()]) + response = await client.listen(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ message = await response.read() + assert isinstance(message, firestore.ListenResponse) + + +@pytest.mark.asyncio +async def test_listen_async_from_dict(): + await test_listen_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + firestore.ListCollectionIdsRequest, + dict, +]) +def test_list_collection_ids(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.ListCollectionIdsResponse( + collection_ids=['collection_ids_value'], + next_page_token='next_page_token_value', + ) + response = client.list_collection_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.ListCollectionIdsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCollectionIdsPager) + assert response.collection_ids == ['collection_ids_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_list_collection_ids_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + client.list_collection_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListCollectionIdsRequest() + + +def test_list_collection_ids_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.ListCollectionIdsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + client.list_collection_ids(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListCollectionIdsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_collection_ids_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse( + collection_ids=['collection_ids_value'], + next_page_token='next_page_token_value', + )) + response = await client.list_collection_ids() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ListCollectionIdsRequest() + +@pytest.mark.asyncio +async def test_list_collection_ids_async(transport: str = 'grpc_asyncio', request_type=firestore.ListCollectionIdsRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse( + collection_ids=['collection_ids_value'], + next_page_token='next_page_token_value', + )) + response = await client.list_collection_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.ListCollectionIdsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCollectionIdsAsyncPager) + assert response.collection_ids == ['collection_ids_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_collection_ids_async_from_dict(): + await test_list_collection_ids_async(request_type=dict) + + +def test_list_collection_ids_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.ListCollectionIdsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + call.return_value = firestore.ListCollectionIdsResponse() + client.list_collection_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_collection_ids_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.ListCollectionIdsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse()) + await client.list_collection_ids(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_collection_ids_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.ListCollectionIdsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_collection_ids( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_collection_ids_flattened_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_collection_ids( + firestore.ListCollectionIdsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_collection_ids_flattened_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.ListCollectionIdsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_collection_ids( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_collection_ids_flattened_error_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_collection_ids( + firestore.ListCollectionIdsRequest(), + parent='parent_value', + ) + + +def test_list_collection_ids_pager(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[], + next_page_token='def', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + ], + next_page_token='ghi', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_collection_ids(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_list_collection_ids_pages(transport_name: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[], + next_page_token='def', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + ], + next_page_token='ghi', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_collection_ids(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_collection_ids_async_pager(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[], + next_page_token='def', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + ], + next_page_token='ghi', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_collection_ids(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_collection_ids_async_pages(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_collection_ids), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[], + next_page_token='def', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + ], + next_page_token='ghi', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_collection_ids(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + firestore.BatchWriteRequest, + dict, +]) +def test_batch_write(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore.BatchWriteResponse( + ) + response = client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore.BatchWriteResponse) + + +def test_batch_write_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + client.batch_write() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchWriteRequest() + + +def test_batch_write_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.BatchWriteRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + client.batch_write(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchWriteRequest( + database='database_value', + ) + +@pytest.mark.asyncio +async def test_batch_write_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse( + )) + response = await client.batch_write() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.BatchWriteRequest() + +@pytest.mark.asyncio +async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=firestore.BatchWriteRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse( + )) + response = await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BatchWriteResponse) + + +@pytest.mark.asyncio +async def test_batch_write_async_from_dict(): + await test_batch_write_async(request_type=dict) + + +def test_batch_write_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore.BatchWriteRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = firestore.BatchWriteResponse() + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_write_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.BatchWriteRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse()) + await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.CreateDocumentRequest, + dict, +]) +def test_create_document(request_type, transport: str = 'grpc'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document( + name='name_value', + ) + response = client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.CreateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +def test_create_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + client.create_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CreateDocumentRequest() + + +def test_create_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.CreateDocumentRequest( + parent='parent_value', + collection_id='collection_id_value', + document_id='document_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + client.create_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CreateDocumentRequest( + parent='parent_value', + collection_id='collection_id_value', + document_id='document_id_value', + ) + +@pytest.mark.asyncio +async def test_create_document_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document( + name='name_value', + )) + response = await client.create_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.CreateDocumentRequest() + +@pytest.mark.asyncio +async def test_create_document_async(transport: str = 'grpc_asyncio', request_type=firestore.CreateDocumentRequest): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document.Document( + name='name_value', + )) + response = await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.CreateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_document_async_from_dict(): + await test_create_document_async(request_type=dict) + + +def test_create_document_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.CreateDocumentRequest() + + request.parent = 'parent_value' + request.collection_id = 'collection_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + call.return_value = document.Document() + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value&collection_id=collection_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_document_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore.CreateDocumentRequest() + + request.parent = 'parent_value' + request.collection_id = 'collection_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_document), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) + await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value&collection_id=collection_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore.GetDocumentRequest, + dict, +]) +def test_get_document_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +def test_get_document_rest_required_fields(request_type=firestore.GetDocumentRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("mask", "read_time", "transaction", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("mask", "readTime", "transaction", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_get_document") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_get_document") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = firestore.GetDocumentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request(transport: str = 'rest', request_type=firestore.GetDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.ListDocumentsRequest, + dict, +]) +def test_list_documents_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.ListDocumentsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_documents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_documents_rest_required_fields(request_type=firestore.ListDocumentsRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("mask", "order_by", "page_size", "page_token", "read_time", "show_missing", "transaction", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.ListDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_documents(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_documents_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(("mask", "orderBy", "pageSize", "pageToken", "readTime", "showMissing", "transaction", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_list_documents") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_list_documents") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.ListDocumentsResponse.to_json(firestore.ListDocumentsResponse()) + + request = firestore.ListDocumentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.ListDocumentsResponse() + + client.list_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_documents_rest_bad_request(transport: str = 'rest', request_type=firestore.ListDocumentsRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_documents(request) + + +def test_list_documents_rest_pager(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token='abc', + ), + firestore.ListDocumentsResponse( + documents=[], + next_page_token='def', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token='ghi', + ), + firestore.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(firestore.ListDocumentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) + for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + firestore.UpdateDocumentRequest, + dict, +]) +def test_update_document_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'document': {'name': 
'projects/sample1/databases/sample2/documents/sample3/sample4'}} + request_init["document"] = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'fields': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore.UpdateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + 
result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gf_document.Document( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gf_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gf_document.Document) + assert response.name == 'name_value' + + +def test_update_document_rest_required_fields(request_type=firestore.UpdateDocumentRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("current_document", "mask", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gf_document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gf_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_document(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_document_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("currentDocument", "mask", "updateMask", )) & set(("document", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_update_document") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_update_document") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.UpdateDocumentRequest.pb(firestore.UpdateDocumentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gf_document.Document.to_json(gf_document.Document()) + + request = firestore.UpdateDocumentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gf_document.Document() + + client.update_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_document_rest_bad_request(transport: str = 'rest', request_type=firestore.UpdateDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'document': {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_document(request) + + +def test_update_document_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = gf_document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {'document': {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gf_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{document.name=projects/*/databases/*/documents/*/**}" % client.transport._host, args[1]) + + +def test_update_document_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_document( + firestore.UpdateDocumentRequest(), + document=gf_document.Document(name='name_value'), + update_mask=common.DocumentMask(field_paths=['field_paths_value']), + ) + + +def test_update_document_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.DeleteDocumentRequest, + dict, +]) +def test_delete_document_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_document(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_document_rest_required_fields(request_type=firestore.DeleteDocumentRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("current_document", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_document(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_document_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("currentDocument", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_delete_document") as pre: + pre.assert_not_called() + pb_message = firestore.DeleteDocumentRequest.pb(firestore.DeleteDocumentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = firestore.DeleteDocumentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_document_rest_bad_request(transport: str = 'rest', request_type=firestore.DeleteDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document(request) + + +def test_delete_document_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/documents/*/**}" % client.transport._host, args[1]) + + +def test_delete_document_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + firestore.DeleteDocumentRequest(), + name='name_value', + ) + + +def test_delete_document_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.BatchGetDocumentsRequest, + dict, +]) +def test_batch_get_documents_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore.BatchGetDocumentsResponse( + transaction=b'transaction_blob', + missing='missing_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.BatchGetDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.batch_get_documents(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BatchGetDocumentsResponse) + assert response.transaction == b'transaction_blob' + + +def test_batch_get_documents_rest_required_fields(request_type=firestore.BatchGetDocumentsRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.BatchGetDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.BatchGetDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.batch_get_documents(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_get_documents_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + 
unset_fields = transport.batch_get_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_batch_get_documents") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_batch_get_documents") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BatchGetDocumentsRequest.pb(firestore.BatchGetDocumentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.BatchGetDocumentsResponse.to_json(firestore.BatchGetDocumentsResponse()) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = firestore.BatchGetDocumentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BatchGetDocumentsResponse() + + client.batch_get_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_get_documents_rest_bad_request(transport: str = 'rest', request_type=firestore.BatchGetDocumentsRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_get_documents(request) + + +def test_batch_get_documents_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.BeginTransactionRequest, + dict, +]) +def test_begin_transaction_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.BeginTransactionResponse( + transaction=b'transaction_blob', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.begin_transaction(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore.BeginTransactionResponse) + assert response.transaction == b'transaction_blob' + + +def test_begin_transaction_rest_required_fields(request_type=firestore.BeginTransactionRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.BeginTransactionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.begin_transaction(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_begin_transaction_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.begin_transaction._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_begin_transaction_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_begin_transaction") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_begin_transaction") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BeginTransactionRequest.pb(firestore.BeginTransactionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.BeginTransactionResponse.to_json(firestore.BeginTransactionResponse()) + + request = firestore.BeginTransactionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BeginTransactionResponse() + + client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_begin_transaction_rest_bad_request(transport: str = 'rest', request_type=firestore.BeginTransactionRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.begin_transaction(request) + + +def test_begin_transaction_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore.BeginTransactionResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.BeginTransactionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.begin_transaction(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:beginTransaction" % client.transport._host, args[1]) + + +def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.begin_transaction( + firestore.BeginTransactionRequest(), + database='database_value', + ) + + +def test_begin_transaction_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.CommitRequest, + dict, +]) +def test_commit_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.CommitResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.commit(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore.CommitResponse) + + +def test_commit_rest_required_fields(request_type=firestore.CommitRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.CommitResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.commit(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_commit_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.commit._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_commit") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_commit") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.CommitResponse.to_json(firestore.CommitResponse()) + + request = firestore.CommitRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.CommitResponse() + + client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_rest_bad_request(transport: str = 'rest', request_type=firestore.CommitRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.commit(request) + + +def test_commit_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore.CommitResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.commit(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:commit" % client.transport._host, args[1]) + + +def test_commit_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.commit( + firestore.CommitRequest(), + database='database_value', + writes=[gf_write.Write(update=document.Document(name='name_value'))], + ) + + +def test_commit_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.RollbackRequest, + dict, +]) +def test_rollback_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.rollback(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_rollback_rest_required_fields(request_type=firestore.RollbackRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["database"] = "" + request_init["transaction"] = b'' + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + jsonified_request["transaction"] = b'transaction_blob' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + assert "transaction" in jsonified_request + assert jsonified_request["transaction"] == b'transaction_blob' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.rollback(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_rollback_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.rollback._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "transaction", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_rollback") as pre: + pre.assert_not_called() + pb_message = firestore.RollbackRequest.pb(firestore.RollbackRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + + request = firestore.RollbackRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_rollback_rest_bad_request(transport: str = 'rest', request_type=firestore.RollbackRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback(request) + + +def test_rollback_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + transaction=b'transaction_blob', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.rollback(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:rollback" % client.transport._host, args[1]) + + +def test_rollback_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + firestore.RollbackRequest(), + database='database_value', + transaction=b'transaction_blob', + ) + + +def test_rollback_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.RunQueryRequest, + dict, +]) +def test_run_query_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.RunQueryResponse( + transaction=b'transaction_blob', + skipped_results=1633, + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.run_query(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.RunQueryResponse) + assert response.transaction == b'transaction_blob' + assert response.skipped_results == 1633 + + +def test_run_query_rest_required_fields(request_type=firestore.RunQueryRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.RunQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.RunQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.run_query(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_run_query_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.run_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_run_query") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_run_query") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.RunQueryResponse.to_json(firestore.RunQueryResponse()) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = firestore.RunQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.RunQueryResponse() + + client.run_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_query_rest_bad_request(transport: str = 'rest', request_type=firestore.RunQueryRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_query(request) + + +def test_run_query_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.RunAggregationQueryRequest, + dict, +]) +def test_run_aggregation_query_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.RunAggregationQueryResponse( + transaction=b'transaction_blob', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.run_aggregation_query(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore.RunAggregationQueryResponse) + assert response.transaction == b'transaction_blob' + + +def test_run_aggregation_query_rest_required_fields(request_type=firestore.RunAggregationQueryRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.RunAggregationQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.RunAggregationQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.run_aggregation_query(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_run_aggregation_query_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_aggregation_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_run_aggregation_query") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_run_aggregation_query") as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.RunAggregationQueryRequest.pb(firestore.RunAggregationQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.RunAggregationQueryResponse.to_json(firestore.RunAggregationQueryResponse()) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = firestore.RunAggregationQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.RunAggregationQueryResponse() + + client.run_aggregation_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_aggregation_query_rest_bad_request(transport: str = 'rest', request_type=firestore.RunAggregationQueryRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_aggregation_query(request) + + +def test_run_aggregation_query_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.PartitionQueryRequest, + dict, +]) +def test_partition_query_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.PartitionQueryResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.PartitionQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.partition_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.PartitionQueryPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_partition_query_rest_required_fields(request_type=firestore.PartitionQueryRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.PartitionQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.PartitionQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.partition_query(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_partition_query_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.partition_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_partition_query_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_partition_query") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_partition_query") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.PartitionQueryRequest.pb(firestore.PartitionQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.PartitionQueryResponse.to_json(firestore.PartitionQueryResponse()) + + request = firestore.PartitionQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.PartitionQueryResponse() + + client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_partition_query_rest_bad_request(transport: str = 'rest', request_type=firestore.PartitionQueryRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.partition_query(request) + + +def test_partition_query_rest_pager(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token='abc', + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token='def', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token='ghi', + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(firestore.PartitionQueryResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/databases/sample2/documents'} + + pager = client.partition_query(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, query.Cursor) + for i in results) + + pages = list(client.partition_query(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_write_rest_unimplemented(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = firestore.WriteRequest() + requests = [request] + with pytest.raises(NotImplementedError): + client.write(requests) + + +def test_listen_rest_unimplemented(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = firestore.ListenRequest() + requests = [request] + with pytest.raises(NotImplementedError): + client.listen(requests) + + 
+@pytest.mark.parametrize("request_type", [ + firestore.ListCollectionIdsRequest, + dict, +]) +def test_list_collection_ids_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.ListCollectionIdsResponse( + collection_ids=['collection_ids_value'], + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.ListCollectionIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_collection_ids(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCollectionIdsPager) + assert response.collection_ids == ['collection_ids_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_list_collection_ids_rest_required_fields(request_type=firestore.ListCollectionIdsRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_collection_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_collection_ids._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.ListCollectionIdsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.ListCollectionIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_collection_ids(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_collection_ids_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_collection_ids._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_collection_ids_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_list_collection_ids") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_list_collection_ids") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = firestore.ListCollectionIdsRequest.pb(firestore.ListCollectionIdsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.ListCollectionIdsResponse.to_json(firestore.ListCollectionIdsResponse()) + + request = firestore.ListCollectionIdsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.ListCollectionIdsResponse() + + client.list_collection_ids(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_collection_ids_rest_bad_request(transport: str = 'rest', request_type=firestore.ListCollectionIdsRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_collection_ids(request) + + +def test_list_collection_ids_rest_flattened(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.ListCollectionIdsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2/documents'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.ListCollectionIdsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_collection_ids(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*/documents}:listCollectionIds" % client.transport._host, args[1]) + + +def test_list_collection_ids_rest_flattened_error(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_collection_ids( + firestore.ListCollectionIdsRequest(), + parent='parent_value', + ) + + +def test_list_collection_ids_rest_pager(transport: str = 'rest'): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[], + next_page_token='def', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + ], + next_page_token='ghi', + ), + firestore.ListCollectionIdsResponse( + collection_ids=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(firestore.ListCollectionIdsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/databases/sample2/documents'} + + pager = client.list_collection_ids(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) + + pages = list(client.list_collection_ids(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + firestore.BatchWriteRequest, + dict, +]) +def test_batch_write_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore.BatchWriteResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_write(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore.BatchWriteResponse) + + +def test_batch_write_rest_required_fields(request_type=firestore.BatchWriteRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an 
appropriate value for the returned response.
+    return_value = firestore.BatchWriteResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = firestore.BatchWriteResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.batch_write(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_batch_write_rest_unset_required_fields():
+    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.batch_write._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("database", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_write_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
+        )
+    client = FirestoreClient(transport=transport)
+    with
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreRestInterceptor, "post_batch_write") as post, \ + mock.patch.object(transports.FirestoreRestInterceptor, "pre_batch_write") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore.BatchWriteResponse.to_json(firestore.BatchWriteResponse()) + + request = firestore.BatchWriteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.BatchWriteResponse() + + client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_write_rest_bad_request(transport: str = 'rest', request_type=firestore.BatchWriteRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_write(request) + + +def test_batch_write_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore.CreateDocumentRequest, + dict, +]) +def test_create_document_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3', 'collection_id': 'sample4'} + request_init["document"] = {'name': 'name_value', 'fields': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del 
request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == 'name_value' + + +def test_create_document_rest_required_fields(request_type=firestore.CreateDocumentRequest): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["collection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["collectionId"] = 'collection_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("document_id", "mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "collectionId" in jsonified_request + assert jsonified_request["collectionId"] == 'collection_id_value' + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document.Document.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_document_rest_unset_required_fields():
+    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("documentId", "mask", )) & set(("parent", "collectionId", "document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_document_rest_interceptors(null_interceptor):
+    transport = transports.FirestoreRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
+        )
+    client = FirestoreClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.FirestoreRestInterceptor, "post_create_document") as post, \
+         mock.patch.object(transports.FirestoreRestInterceptor, "pre_create_document") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = firestore.CreateDocumentRequest.pb(firestore.CreateDocumentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = firestore.CreateDocumentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.create_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_rest_bad_request(transport: str = 'rest', request_type=firestore.CreateDocumentRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3', 'collection_id': 'sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_document(request) + + +def test_create_document_rest_error(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirestoreClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.FirestoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FirestoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.FirestoreGrpcTransport, + transports.FirestoreGrpcAsyncIOTransport, + transports.FirestoreRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = FirestoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FirestoreGrpcTransport, + ) + +def test_firestore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FirestoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_firestore_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.FirestoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'get_document', + 'list_documents', + 'update_document', + 'delete_document', + 'batch_get_documents', + 'begin_transaction', + 'commit', + 'rollback', + 'run_query', + 'run_aggregation_query', + 'partition_query', + 'write', + 'listen', + 'list_collection_ids', + 'batch_write', + 'create_document', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_firestore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + quota_project_id="octopus", + ) + + +def test_firestore_base_transport_with_adc(): + # Test the default credentials are used if 
credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreTransport() + adc.assert_called_once() + + +def test_firestore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirestoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreGrpcTransport, + transports.FirestoreGrpcAsyncIOTransport, + ], +) +def test_firestore_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreGrpcTransport, + transports.FirestoreGrpcAsyncIOTransport, + transports.FirestoreRestTransport, + ], +) +def test_firestore_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreGrpcTransport, grpc_helpers), + (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_firestore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + scopes=["1", "2"], + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport]) +def test_firestore_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_firestore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.FirestoreRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_firestore_host_no_port(transport_name): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://firestore.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_firestore_host_with_port(transport_name): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'firestore.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://firestore.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_firestore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FirestoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FirestoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_document._session + session2 = client2.transport.get_document._session + assert session1 != session2 + session1 = client1.transport.list_documents._session + session2 = client2.transport.list_documents._session + assert session1 != session2 + session1 = client1.transport.update_document._session + session2 = client2.transport.update_document._session + assert session1 != session2 + session1 = client1.transport.delete_document._session + session2 = client2.transport.delete_document._session + assert session1 != session2 + session1 = client1.transport.batch_get_documents._session + session2 = client2.transport.batch_get_documents._session + assert session1 != session2 + session1 = client1.transport.begin_transaction._session + session2 = client2.transport.begin_transaction._session + assert session1 != session2 + session1 = client1.transport.commit._session + session2 = client2.transport.commit._session + assert session1 != session2 + session1 = client1.transport.rollback._session + session2 = client2.transport.rollback._session + assert session1 != session2 + session1 = client1.transport.run_query._session + session2 = client2.transport.run_query._session + assert session1 != session2 + session1 = client1.transport.run_aggregation_query._session + session2 = client2.transport.run_aggregation_query._session + assert session1 != session2 + session1 = client1.transport.partition_query._session + session2 = client2.transport.partition_query._session + assert session1 != session2 + session1 = client1.transport.write._session + session2 = client2.transport.write._session + assert session1 != session2 + session1 = 
client1.transport.listen._session + session2 = client2.transport.listen._session + assert session1 != session2 + session1 = client1.transport.list_collection_ids._session + session2 = client2.transport.list_collection_ids._session + assert session1 != session2 + session1 = client1.transport.batch_write._session + session2 = client2.transport.batch_write._session + assert session1 != session2 + session1 = client1.transport.create_document._session + session2 = client2.transport.create_document._session + assert session1 != session2 +def test_firestore_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirestoreGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_firestore_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirestoreGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport]) +def test_firestore_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport]) +def test_firestore_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = FirestoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FirestoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = FirestoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FirestoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = FirestoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FirestoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = FirestoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FirestoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = FirestoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = FirestoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.FirestoreTransport, '_prep_wrapped_messages') as prep: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.FirestoreTransport, '_prep_wrapped_messages') as prep: + transport_class = FirestoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 
'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = FirestoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (FirestoreClient, transports.FirestoreGrpcTransport), + (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + 
client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/firestore_admin/v1/.coveragerc b/owl-bot-staging/firestore_admin/v1/.coveragerc new file mode 100644 index 0000000000..2f9de152b2 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/firestore_admin/__init__.py + google/cloud/firestore_admin/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/firestore_admin/v1/.flake8 b/owl-bot-staging/firestore_admin/v1/.flake8 new file mode 100644 index 0000000000..29227d4cf4 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/firestore_admin/v1/MANIFEST.in b/owl-bot-staging/firestore_admin/v1/MANIFEST.in new file mode 100644 index 0000000000..b3d50a1888 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/firestore_admin *.py +recursive-include google/cloud/firestore_admin_v1 *.py diff --git a/owl-bot-staging/firestore_admin/v1/README.rst b/owl-bot-staging/firestore_admin/v1/README.rst new file mode 100644 index 0000000000..8704400bdb --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Firestore Admin API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Firestore Admin API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css b/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css new file mode 100644 index 0000000000..06423be0b5 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/firestore_admin/v1/docs/conf.py b/owl-bot-staging/firestore_admin/v1/docs/conf.py new file mode 100644 index 0000000000..5a3c49c61d --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-firestore-admin documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-firestore-admin" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-firestore-admin-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-firestore-admin.tex", + u"google-cloud-firestore-admin Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-firestore-admin", + u"Google Cloud Firestore Admin Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-firestore-admin", + u"google-cloud-firestore-admin Documentation", + author, + "google-cloud-firestore-admin", + "GAPIC library for Google Cloud Firestore Admin API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst 
b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst new file mode 100644 index 0000000000..59a8ccdfb5 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst @@ -0,0 +1,10 @@ +FirestoreAdmin +-------------------------------- + +.. automodule:: google.cloud.firestore_admin_v1.services.firestore_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.firestore_admin_v1.services.firestore_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst new file mode 100644 index 0000000000..24782d194c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Firestore Admin v1 API +================================================ +.. toctree:: + :maxdepth: 2 + + firestore_admin diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst new file mode 100644 index 0000000000..9396fc9eaf --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Firestore Admin v1 API +============================================= + +.. automodule:: google.cloud.firestore_admin_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/firestore_admin/v1/docs/index.rst b/owl-bot-staging/firestore_admin/v1/docs/index.rst new file mode 100644 index 0000000000..701eea6e53 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + firestore_admin_v1/services + firestore_admin_v1/types diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py new file mode 100644 index 0000000000..02f3da2116 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.firestore_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.firestore_admin_v1.services.firestore_admin.client import FirestoreAdminClient +from google.cloud.firestore_admin_v1.services.firestore_admin.async_client import FirestoreAdminAsyncClient + +from google.cloud.firestore_admin_v1.types.backup import Backup +from google.cloud.firestore_admin_v1.types.database import Database +from google.cloud.firestore_admin_v1.types.field import Field +from google.cloud.firestore_admin_v1.types.firestore_admin import CreateBackupScheduleRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import CreateDatabaseMetadata +from google.cloud.firestore_admin_v1.types.firestore_admin import CreateDatabaseRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import CreateIndexRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteBackupRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteBackupScheduleRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteDatabaseMetadata +from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteDatabaseRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteIndexRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ExportDocumentsRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import GetBackupRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import GetBackupScheduleRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import GetDatabaseRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import GetFieldRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import GetIndexRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ImportDocumentsRequest +from 
google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupSchedulesRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupSchedulesResponse +from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupsRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupsResponse +from google.cloud.firestore_admin_v1.types.firestore_admin import ListDatabasesRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ListDatabasesResponse +from google.cloud.firestore_admin_v1.types.firestore_admin import ListFieldsRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ListFieldsResponse +from google.cloud.firestore_admin_v1.types.firestore_admin import ListIndexesRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import ListIndexesResponse +from google.cloud.firestore_admin_v1.types.firestore_admin import RestoreDatabaseRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateBackupScheduleRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateDatabaseMetadata +from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateDatabaseRequest +from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateFieldRequest +from google.cloud.firestore_admin_v1.types.index import Index +from google.cloud.firestore_admin_v1.types.location import LocationMetadata +from google.cloud.firestore_admin_v1.types.operation import ExportDocumentsMetadata +from google.cloud.firestore_admin_v1.types.operation import ExportDocumentsResponse +from google.cloud.firestore_admin_v1.types.operation import FieldOperationMetadata +from google.cloud.firestore_admin_v1.types.operation import ImportDocumentsMetadata +from google.cloud.firestore_admin_v1.types.operation import IndexOperationMetadata +from google.cloud.firestore_admin_v1.types.operation import Progress +from 
google.cloud.firestore_admin_v1.types.operation import RestoreDatabaseMetadata +from google.cloud.firestore_admin_v1.types.operation import OperationState +from google.cloud.firestore_admin_v1.types.schedule import BackupSchedule +from google.cloud.firestore_admin_v1.types.schedule import DailyRecurrence +from google.cloud.firestore_admin_v1.types.schedule import WeeklyRecurrence + +__all__ = ('FirestoreAdminClient', + 'FirestoreAdminAsyncClient', + 'Backup', + 'Database', + 'Field', + 'CreateBackupScheduleRequest', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'CreateIndexRequest', + 'DeleteBackupRequest', + 'DeleteBackupScheduleRequest', + 'DeleteDatabaseMetadata', + 'DeleteDatabaseRequest', + 'DeleteIndexRequest', + 'ExportDocumentsRequest', + 'GetBackupRequest', + 'GetBackupScheduleRequest', + 'GetDatabaseRequest', + 'GetFieldRequest', + 'GetIndexRequest', + 'ImportDocumentsRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'ListFieldsRequest', + 'ListFieldsResponse', + 'ListIndexesRequest', + 'ListIndexesResponse', + 'RestoreDatabaseRequest', + 'UpdateBackupScheduleRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'UpdateFieldRequest', + 'Index', + 'LocationMetadata', + 'ExportDocumentsMetadata', + 'ExportDocumentsResponse', + 'FieldOperationMetadata', + 'ImportDocumentsMetadata', + 'IndexOperationMetadata', + 'Progress', + 'RestoreDatabaseMetadata', + 'OperationState', + 'BackupSchedule', + 'DailyRecurrence', + 'WeeklyRecurrence', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py new file mode 100644 index 0000000000..558c8aab67 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed new file mode 100644 index 0000000000..f7a4796eee --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-firestore-admin package uses inline types. diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py new file mode 100644 index 0000000000..d26d37256c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.firestore_admin_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.firestore_admin import FirestoreAdminClient +from .services.firestore_admin import FirestoreAdminAsyncClient + +from .types.backup import Backup +from .types.database import Database +from .types.field import Field +from .types.firestore_admin import CreateBackupScheduleRequest +from .types.firestore_admin import CreateDatabaseMetadata +from .types.firestore_admin import CreateDatabaseRequest +from .types.firestore_admin import CreateIndexRequest +from .types.firestore_admin import DeleteBackupRequest +from .types.firestore_admin import DeleteBackupScheduleRequest +from .types.firestore_admin import DeleteDatabaseMetadata +from .types.firestore_admin import DeleteDatabaseRequest +from .types.firestore_admin import DeleteIndexRequest +from .types.firestore_admin import ExportDocumentsRequest +from .types.firestore_admin import GetBackupRequest +from .types.firestore_admin import GetBackupScheduleRequest +from .types.firestore_admin import GetDatabaseRequest +from .types.firestore_admin import GetFieldRequest +from .types.firestore_admin import GetIndexRequest +from .types.firestore_admin import ImportDocumentsRequest +from .types.firestore_admin import ListBackupSchedulesRequest +from .types.firestore_admin import ListBackupSchedulesResponse +from .types.firestore_admin import ListBackupsRequest +from .types.firestore_admin import ListBackupsResponse +from .types.firestore_admin import ListDatabasesRequest +from .types.firestore_admin import ListDatabasesResponse +from .types.firestore_admin import ListFieldsRequest +from .types.firestore_admin import ListFieldsResponse +from .types.firestore_admin import ListIndexesRequest +from .types.firestore_admin import ListIndexesResponse +from .types.firestore_admin import RestoreDatabaseRequest +from .types.firestore_admin import UpdateBackupScheduleRequest +from 
.types.firestore_admin import UpdateDatabaseMetadata +from .types.firestore_admin import UpdateDatabaseRequest +from .types.firestore_admin import UpdateFieldRequest +from .types.index import Index +from .types.location import LocationMetadata +from .types.operation import ExportDocumentsMetadata +from .types.operation import ExportDocumentsResponse +from .types.operation import FieldOperationMetadata +from .types.operation import ImportDocumentsMetadata +from .types.operation import IndexOperationMetadata +from .types.operation import Progress +from .types.operation import RestoreDatabaseMetadata +from .types.operation import OperationState +from .types.schedule import BackupSchedule +from .types.schedule import DailyRecurrence +from .types.schedule import WeeklyRecurrence + +__all__ = ( + 'FirestoreAdminAsyncClient', +'Backup', +'BackupSchedule', +'CreateBackupScheduleRequest', +'CreateDatabaseMetadata', +'CreateDatabaseRequest', +'CreateIndexRequest', +'DailyRecurrence', +'Database', +'DeleteBackupRequest', +'DeleteBackupScheduleRequest', +'DeleteDatabaseMetadata', +'DeleteDatabaseRequest', +'DeleteIndexRequest', +'ExportDocumentsMetadata', +'ExportDocumentsRequest', +'ExportDocumentsResponse', +'Field', +'FieldOperationMetadata', +'FirestoreAdminClient', +'GetBackupRequest', +'GetBackupScheduleRequest', +'GetDatabaseRequest', +'GetFieldRequest', +'GetIndexRequest', +'ImportDocumentsMetadata', +'ImportDocumentsRequest', +'Index', +'IndexOperationMetadata', +'ListBackupSchedulesRequest', +'ListBackupSchedulesResponse', +'ListBackupsRequest', +'ListBackupsResponse', +'ListDatabasesRequest', +'ListDatabasesResponse', +'ListFieldsRequest', +'ListFieldsResponse', +'ListIndexesRequest', +'ListIndexesResponse', +'LocationMetadata', +'OperationState', +'Progress', +'RestoreDatabaseMetadata', +'RestoreDatabaseRequest', +'UpdateBackupScheduleRequest', +'UpdateDatabaseMetadata', +'UpdateDatabaseRequest', +'UpdateFieldRequest', +'WeeklyRecurrence', +) diff --git 
a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json new file mode 100644 index 0000000000..73f37c4180 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json @@ -0,0 +1,373 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.firestore_admin_v1", + "protoPackage": "google.firestore.admin.v1", + "schema": "1.0", + "services": { + "FirestoreAdmin": { + "clients": { + "grpc": { + "libraryClient": "FirestoreAdminClient", + "rpcs": { + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportDocuments": { + "methods": [ + "export_documents" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetField": { + "methods": [ + "get_field" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListFields": { + "methods": [ + "list_fields" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + }, + "RestoreDatabase": { + "methods": [ + 
"restore_database" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateField": { + "methods": [ + "update_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FirestoreAdminAsyncClient", + "rpcs": { + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportDocuments": { + "methods": [ + "export_documents" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetField": { + "methods": [ + "get_field" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListFields": { + "methods": [ + "list_fields" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateField": { + "methods": [ + "update_field" + ] + } + } + }, + "rest": { + "libraryClient": "FirestoreAdminClient", + "rpcs": { + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + 
"methods": [ + "create_database" + ] + }, + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportDocuments": { + "methods": [ + "export_documents" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetField": { + "methods": [ + "get_field" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListFields": { + "methods": [ + "list_fields" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateField": { + "methods": [ + "update_field" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py new file mode 100644 index 0000000000..558c8aab67 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed new file mode 100644 index 0000000000..f7a4796eee --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-firestore-admin package uses inline types. diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py new file mode 100644 index 0000000000..8f6cf06824 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py new file mode 100644 index 0000000000..7d14cb399e --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import FirestoreAdminClient +from .async_client import FirestoreAdminAsyncClient + +__all__ = ( + 'FirestoreAdminClient', + 'FirestoreAdminAsyncClient', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py new file mode 100644 index 0000000000..11f8a357df --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -0,0 +1,3202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.firestore_admin_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation as gac_operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.firestore_admin_v1.services.firestore_admin import pagers +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import database as gfa_database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import field as gfa_field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import index as gfa_index +from google.cloud.firestore_admin_v1.types import 
operation as gfa_operation +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport +from .client import FirestoreAdminClient + + +class FirestoreAdminAsyncClient: + """The Cloud Firestore Admin API. + + This API provides several administrative services for Cloud + Firestore. + + Project, Database, Namespace, Collection, Collection Group, and + Document are used as defined in the Google Cloud Firestore API. + + Operation: An Operation represents work being performed in the + background. + + The index service manages Cloud Firestore indexes. + + Index creation is performed asynchronously. An Operation resource is + created for each such asynchronous operation. The state of the + operation (including any errors encountered) may be queried via the + Operation resource. + + The Operations collection provides a record of actions performed for + the specified Project (including any Operations in progress). + Operations are not created directly but through calls on other + collections or resources. + + An Operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. Operations are garbage + collected after 30 days. By default, ListOperations will only return + in progress and failed operations. To list completed operation, + issue a ListOperations request with the filter ``done: true``. + + Operations are created by service ``FirestoreAdmin``, but are + accessed via service ``google.longrunning.Operations``. 
+ """ + + _client: FirestoreAdminClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = FirestoreAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FirestoreAdminClient._DEFAULT_UNIVERSE + + backup_path = staticmethod(FirestoreAdminClient.backup_path) + parse_backup_path = staticmethod(FirestoreAdminClient.parse_backup_path) + backup_schedule_path = staticmethod(FirestoreAdminClient.backup_schedule_path) + parse_backup_schedule_path = staticmethod(FirestoreAdminClient.parse_backup_schedule_path) + collection_group_path = staticmethod(FirestoreAdminClient.collection_group_path) + parse_collection_group_path = staticmethod(FirestoreAdminClient.parse_collection_group_path) + database_path = staticmethod(FirestoreAdminClient.database_path) + parse_database_path = staticmethod(FirestoreAdminClient.parse_database_path) + field_path = staticmethod(FirestoreAdminClient.field_path) + parse_field_path = staticmethod(FirestoreAdminClient.parse_field_path) + index_path = staticmethod(FirestoreAdminClient.index_path) + parse_index_path = staticmethod(FirestoreAdminClient.parse_index_path) + location_path = staticmethod(FirestoreAdminClient.location_path) + parse_location_path = staticmethod(FirestoreAdminClient.parse_location_path) + common_billing_account_path = staticmethod(FirestoreAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(FirestoreAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(FirestoreAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(FirestoreAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(FirestoreAdminClient.common_organization_path) + parse_common_organization_path = 
staticmethod(FirestoreAdminClient.parse_common_organization_path) + common_project_path = staticmethod(FirestoreAdminClient.common_project_path) + parse_common_project_path = staticmethod(FirestoreAdminClient.parse_common_project_path) + common_location_path = staticmethod(FirestoreAdminClient.common_location_path) + parse_common_location_path = staticmethod(FirestoreAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAdminAsyncClient: The constructed client. + """ + return FirestoreAdminClient.from_service_account_info.__func__(FirestoreAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAdminAsyncClient: The constructed client. + """ + return FirestoreAdminClient.from_service_account_file.__func__(FirestoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FirestoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FirestoreAdminTransport: + """Returns the transport used by the client instance. + + Returns: + FirestoreAdminTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = functools.partial(type(FirestoreAdminClient).get_transport_class, type(FirestoreAdminClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, FirestoreAdminTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firestore admin async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.FirestoreAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FirestoreAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_index(self, + request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, + *, + parent: Optional[str] = None, + index: Optional[gfa_index.Index] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a composite index. This returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the creation. The + metadata for the operation will be the type + [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_create_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateIndexRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]]): + The request object. The request for + [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. + parent (:class:`str`): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + index (:class:`google.cloud.firestore_admin_v1.types.Index`): + Required. The composite index to + create. + + This corresponds to the ``index`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against + documents in a database. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, index]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.CreateIndexRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if index is not None: + request.index = index + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_index, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gfa_index.Index, + metadata_type=gfa_operation.IndexOperationMetadata, + ) + + # Done; return the response. + return response + + async def list_indexes(self, + request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIndexesAsyncPager: + r"""Lists composite indexes. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_list_indexes(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListIndexesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]]): + The request object. The request for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + parent (:class:`str`): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager: + The response for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.ListIndexesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_indexes, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListIndexesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_index(self, + request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> index.Index: + r"""Gets a composite index. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_get_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetIndexRequest( + name="name_value", + ) + + # Make the request + response = await client.get_index(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]]): + The request object. The request for + [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. + name (:class:`str`): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.firestore_admin_v1.types.Index: + Cloud Firestore indexes enable simple + and complex queries against documents in + a database. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.GetIndexRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_index, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_index(self, + request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a composite index. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_delete_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteIndexRequest( + name="name_value", + ) + + # Make the request + await client.delete_index(request=request) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]]): + The request object. The request for + [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. + name (:class:`str`): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.DeleteIndexRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_index, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_field(self, + request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> field.Field: + r"""Gets the metadata and configuration for a Field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_get_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetFieldRequest( + name="name_value", + ) + + # Make the request + response = await client.get_field(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]]): + The request object. The request for + [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. + name (:class:`str`): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.Field: + Represents a single field in the + database. + Fields are grouped by their "Collection + Group", which represent all collections + in the database with the same id. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.GetFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_field, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_field(self, + request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, + *, + field: Optional[gfa_field.Field] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a field configuration. Currently, field updates apply + only to single field index configuration. 
However, calls to + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] + should provide a field mask to avoid changing any configuration + that the caller isn't aware of. The field mask should be + specified as: ``{ paths: "index_config" }``. + + This call returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the field update. The + metadata for the operation will be the type + [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + + To configure the default field settings for the database, use + the special ``Field`` with resource name: + ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_update_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + field = firestore_admin_v1.Field() + field.name = "name_value" + + request = firestore_admin_v1.UpdateFieldRequest( + field=field, + ) + + # Make the request + operation = client.update_field(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]]): + The request object. The request for + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. 
+ field (:class:`google.cloud.firestore_admin_v1.types.Field`): + Required. The field to be updated. + This corresponds to the ``field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Field` + Represents a single field in the database. + + Fields are grouped by their "Collection Group", which + represent all collections in the database with the + same id. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([field]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.UpdateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if field is not None: + request.field = field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_field, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("field.name", request.field.name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gfa_field.Field, + metadata_type=gfa_operation.FieldOperationMetadata, + ) + + # Done; return the response. + return response + + async def list_fields(self, + request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListFieldsAsyncPager: + r"""Lists the field configuration and metadata for this database. + + Currently, + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + only supports listing fields that have been explicitly + overridden. To issue this query, call + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + with the filter set to ``indexConfig.usesAncestorConfig:false`` + or ``ttlConfig:*``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_list_fields(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListFieldsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_fields(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]]): + The request object. The request for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + parent (:class:`str`): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager: + The response for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.ListFieldsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_fields, + default_retry=retries.AsyncRetry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListFieldsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def export_documents(self, + request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports a copy of all or a subset of documents from + Google Cloud Firestore to another storage system, such + as Google Cloud Storage. Recent updates to documents may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + For more details on export behavior and output format, + refer to: + + https://cloud.google.com/firestore/docs/manage-data/export-import + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_export_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ExportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.export_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]]): + The request object. The request for + [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. + name (:class:`str`): + Required. Database to export. Should be of the form: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] + response field. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.ExportDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_documents, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gfa_operation.ExportDocumentsResponse, + metadata_type=gfa_operation.ExportDocumentsMetadata, + ) + + # Done; return the response. + return response + + async def import_documents(self, + request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports documents into Google Cloud Firestore. + Existing documents with the same name are overwritten. + The import occurs in the background and its progress can + be monitored and managed via the Operation resource that + is created. 
If an ImportDocuments operation is + cancelled, it is possible that a subset of the data has + already been imported to Cloud Firestore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_import_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ImportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]]): + The request object. The request for + [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. + name (:class:`str`): + Required. Database to import into. Should be of the + form: ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.ImportDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_documents, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=gfa_operation.ImportDocumentsMetadata, + ) + + # Done; return the response. 
        return response

    async def create_database(self,
            request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            database: Optional[gfa_database.Database] = None,
            database_id: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Create a database.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_create_database():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.CreateDatabaseRequest(
                    parent="parent_value",
                    database_id="database_id_value",
                )

                # Make the request
                operation = client.create_database(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase].
            parent (:class:`str`):
                Required. A parent name of the form
                ``projects/{project_id}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            database (:class:`google.cloud.firestore_admin_v1.types.Database`):
                Required. The Database to create.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            database_id (:class:`str`):
                Required. The ID to use for the database, which will
                become the final component of the database's resource
                name.

                This value should be 4-63 characters. Valid characters
                are /[a-z][0-9]-/ with first character a letter and the
                last a letter or a number. Must not be UUID-like
                /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/.

                "(default)" database id is also valid.

                This corresponds to the ``database_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.firestore_admin_v1.types.Database`
                A Cloud Firestore Database.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened fields are mutually exclusive.)
        has_flattened_params = any([parent, database, database_id])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.CreateDatabaseRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if database is not None:
            request.database = database
        if database_id is not None:
            request.database_id = database_id

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_database,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `parent`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The future resolves to a Database; its metadata type is
        # CreateDatabaseMetadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gfa_database.Database,
            metadata_type=firestore_admin.CreateDatabaseMetadata,
        )

        # Done; return the response.
        return response
    async def get_database(self,
            request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> database.Database:
        r"""Gets information about a database.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_get_database():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.GetDatabaseRequest(
                    name="name_value",
                )

                # Make the request
                response = await client.get_database(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase].
            name (:class:`str`):
                Required. A name of the form
                ``projects/{project_id}/databases/{database_id}``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.firestore_admin_v1.types.Database:
                A Cloud Firestore Database.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `name` are mutually exclusive.)
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.GetDatabaseRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_database,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `name`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def list_databases(self,
            request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> firestore_admin.ListDatabasesResponse:
        r"""List all the databases in the project.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_list_databases():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.ListDatabasesRequest(
                    parent="parent_value",
                )

                # Make the request
                response = await client.list_databases(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]]):
                The request object. A request to list the Firestore
                Databases in all locations for a
                project.
            parent (:class:`str`):
                Required. A parent name of the form
                ``projects/{project_id}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.firestore_admin_v1.types.ListDatabasesResponse:
                The list of databases for a project.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `parent` are mutually exclusive.)
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.ListDatabasesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_databases,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `parent`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def update_database(self,
            request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None,
            *,
            database: Optional[gfa_database.Database] = None,
            update_mask: Optional[field_mask_pb2.FieldMask] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Updates a database.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_update_database():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.UpdateDatabaseRequest(
                )

                # Make the request
                operation = client.update_database(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase].
            database (:class:`google.cloud.firestore_admin_v1.types.Database`):
                Required. The database to update.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                The list of fields to be updated.
                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.firestore_admin_v1.types.Database`
                A Cloud Firestore Database.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened fields are mutually exclusive.)
        has_flattened_params = any([database, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.UpdateDatabaseRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_database,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing header keyed on the nested `database.name`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("database.name", request.database.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The future resolves to a Database; its metadata type is
        # UpdateDatabaseMetadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gfa_database.Database,
            metadata_type=firestore_admin.UpdateDatabaseMetadata,
        )

        # Done; return the response.
        return response
    async def delete_database(self,
            request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Deletes a database.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_delete_database():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.DeleteDatabaseRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_database(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase].
            name (:class:`str`):
                Required. A name of the form
                ``projects/{project_id}/databases/{database_id}``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.firestore_admin_v1.types.Database`
                A Cloud Firestore Database.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `name` are mutually exclusive.)
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.DeleteDatabaseRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_database,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `name`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The future resolves to a Database; its metadata type is
        # DeleteDatabaseMetadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            database.Database,
            metadata_type=firestore_admin.DeleteDatabaseMetadata,
        )

        # Done; return the response.
        return response
    async def get_backup(self,
            request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> backup.Backup:
        r"""Gets information about a backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_get_backup():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.GetBackupRequest(
                    name="name_value",
                )

                # Make the request
                response = await client.get_backup(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup].
            name (:class:`str`):
                Required. Name of the backup to fetch.

                Format is
                ``projects/{project}/locations/{location}/backups/{backup}``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.firestore_admin_v1.types.Backup:
                A Backup of a Cloud Firestore
                Database.
                The backup contains all documents and
                index configurations for the given
                database at a specific point in time.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `name` are mutually exclusive.)
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.GetBackupRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_backup,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `name`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def list_backups(self,
            request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> firestore_admin.ListBackupsResponse:
        r"""Lists all the backups.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_list_backups():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.ListBackupsRequest(
                    parent="parent_value",
                )

                # Make the request
                response = await client.list_backups(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups].
            parent (:class:`str`):
                Required. The location to list backups from.

                Format is ``projects/{project}/locations/{location}``.
                Use ``{location} = '-'`` to list backups from all
                locations for the given project. This allows listing
                backups from a single location or from all locations.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.firestore_admin_v1.types.ListBackupsResponse:
                The response for
                [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups].

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `parent` are mutually exclusive.)
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.ListBackupsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_backups,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `parent`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def delete_backup(self,
            request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> None:
        r"""Deletes a backup.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_delete_backup():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.DeleteBackupRequest(
                    name="name_value",
                )

                # Make the request
                await client.delete_backup(request=request)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup].
            name (:class:`str`):
                Required. Name of the backup to delete.

                format is
                ``projects/{project}/locations/{location}/backups/{backup}``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened `name` are mutually exclusive.)
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.DeleteBackupRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_backup,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `name`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        # This RPC has no meaningful response body; nothing is returned.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
    async def restore_database(self,
            request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Creates a new database by restoring from an existing backup.

        The new database must be in the same cloud region or
        multi-region location as the existing backup. This behaves
        similar to
        [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase]
        except instead of creating a new empty database, a new database
        is created with the database type, index configuration, and
        documents from an existing backup.

        The [long-running operation][google.longrunning.Operation] can
        be used to track the progress of the restore, with the
        Operation's [metadata][google.longrunning.Operation.metadata]
        field type being the
        [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata].
        The [response][google.longrunning.Operation.response] type is
        the [Database][google.firestore.admin.v1.Database] if the
        restore was successful. The new database is not readable or
        writeable until the LRO has completed.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_restore_database():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.RestoreDatabaseRequest(
                    parent="parent_value",
                    database_id="database_id_value",
                    backup="backup_value",
                )

                # Make the request
                operation = client.restore_database(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]]):
                The request object. The request message for
                [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase].
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.firestore_admin_v1.types.Database`
                A Cloud Firestore Database.

        """
        # Create or coerce a protobuf request object.
        # (No flattened fields are offered for this RPC, so no
        # mutual-exclusion check is needed.)
        request = firestore_admin.RestoreDatabaseRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.restore_database,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `parent`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The future resolves to a Database; its metadata type is
        # RestoreDatabaseMetadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            database.Database,
            metadata_type=gfa_operation.RestoreDatabaseMetadata,
        )

        # Done; return the response.
        return response
    async def create_backup_schedule(self,
            request: Optional[Union[firestore_admin.CreateBackupScheduleRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            backup_schedule: Optional[schedule.BackupSchedule] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> schedule.BackupSchedule:
        r"""Creates a backup schedule on a database.
        At most two backup schedules can be configured on a
        database, one daily backup schedule and one weekly
        backup schedule.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import firestore_admin_v1

            async def sample_create_backup_schedule():
                # Create a client
                client = firestore_admin_v1.FirestoreAdminAsyncClient()

                # Initialize request argument(s)
                request = firestore_admin_v1.CreateBackupScheduleRequest(
                    parent="parent_value",
                )

                # Make the request
                response = await client.create_backup_schedule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]]):
                The request object. The request for
                [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule].
            parent (:class:`str`):
                Required. The parent database.

                Format ``projects/{project}/databases/{database}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`):
                Required. The backup schedule to
                create.

                This corresponds to the ``backup_schedule`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.firestore_admin_v1.types.BackupSchedule:
                A backup schedule for a Cloud
                Firestore Database.
                This resource is owned by the database
                it is backing up, and is deleted along
                with the database. The actual backups
                are not though.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # (A full `request` and the flattened fields are mutually exclusive.)
        has_flattened_params = any([parent, backup_schedule])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = firestore_admin.CreateBackupScheduleRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if backup_schedule is not None:
            request.backup_schedule = backup_schedule

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # No default timeout/retry is configured for this method.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_backup_schedule,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (gRPC routing header keyed on `parent`).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def get_backup_schedule(self,
            request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> schedule.BackupSchedule:
        r"""Gets information about a backup schedule.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_get_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]]): + The request object. The request for + [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. + name (:class:`str`): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.GetBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup_schedule, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_schedules(self, + request: Optional[Union[firestore_admin.ListBackupSchedulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore_admin.ListBackupSchedulesResponse: + r"""List backup schedules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_list_backup_schedules(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_backup_schedules(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]]): + The request object. The request for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + parent (:class:`str`): + Required. The parent database. + + Format is ``projects/{project}/databases/{database}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: + The response for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.ListBackupSchedulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backup_schedules, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_schedule(self, + request: Optional[Union[firestore_admin.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_update_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]]): + The request object. The request for + [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. + backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`): + Required. The backup schedule to + update. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_schedule, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.UpdateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup_schedule, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup_schedule.name", request.backup_schedule.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_schedule(self, + request: Optional[Union[firestore_admin.DeleteBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + async def sample_delete_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + Args: + request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]]): + The request object. The request for + [FirestoreAdmin.DeleteBackupSchedules][]. + name (:class:`str`): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = firestore_admin.DeleteBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup_schedule, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "FirestoreAdminAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirestoreAdminAsyncClient", +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py new file mode 100644 index 0000000000..7ce70b29bc --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -0,0 +1,3570 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.firestore_admin_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation as gac_operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.firestore_admin_v1.services.firestore_admin import pagers +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import database as gfa_database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import field as gfa_field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import index as gfa_index +from google.cloud.firestore_admin_v1.types import operation as gfa_operation +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from 
google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import FirestoreAdminGrpcTransport +from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport +from .transports.rest import FirestoreAdminRestTransport + + +class FirestoreAdminClientMeta(type): + """Metaclass for the FirestoreAdmin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]] + _transport_registry["grpc"] = FirestoreAdminGrpcTransport + _transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport + _transport_registry["rest"] = FirestoreAdminRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[FirestoreAdminTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FirestoreAdminClient(metaclass=FirestoreAdminClientMeta): + """The Cloud Firestore Admin API. + + This API provides several administrative services for Cloud + Firestore. + + Project, Database, Namespace, Collection, Collection Group, and + Document are used as defined in the Google Cloud Firestore API. + + Operation: An Operation represents work being performed in the + background. 
+ + The index service manages Cloud Firestore indexes. + + Index creation is performed asynchronously. An Operation resource is + created for each such asynchronous operation. The state of the + operation (including any errors encountered) may be queried via the + Operation resource. + + The Operations collection provides a record of actions performed for + the specified Project (including any Operations in progress). + Operations are not created directly but through calls on other + collections or resources. + + An Operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. Operations are garbage + collected after 30 days. By default, ListOperations will only return + in progress and failed operations. To list completed operation, + issue a ListOperations request with the filter ``done: true``. + + Operations are created by service ``FirestoreAdmin``, but are + accessed via service ``google.longrunning.Operations``. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "firestore.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FirestoreAdminTransport: + """Returns the transport used by the client instance. + + Returns: + FirestoreAdminTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def backup_path(project: str,location: str,backup: str,) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backups/{backup}".format(project=project, location=location, backup=backup, ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str,str]: + """Parses a backup path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def backup_schedule_path(project: str,database: str,backup_schedule: str,) -> str: + """Returns a fully-qualified backup_schedule string.""" + return "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format(project=project, database=database, backup_schedule=backup_schedule, ) + + @staticmethod + def parse_backup_schedule_path(path: str) -> Dict[str,str]: + """Parses a backup_schedule path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/backupSchedules/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def collection_group_path(project: str,database: str,collection: str,) -> str: + """Returns a fully-qualified collection_group string.""" + return "projects/{project}/databases/{database}/collectionGroups/{collection}".format(project=project, database=database, collection=collection, ) + + @staticmethod + def parse_collection_group_path(path: str) -> Dict[str,str]: + """Parses a collection_group path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def database_path(project: str,database: str,) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/databases/{database}".format(project=project, database=database, ) + + @staticmethod + def parse_database_path(path: str) -> 
Dict[str,str]: + """Parses a database path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def field_path(project: str,database: str,collection: str,field: str,) -> str: + """Returns a fully-qualified field string.""" + return "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(project=project, database=database, collection=collection, field=field, ) + + @staticmethod + def parse_field_path(path: str) -> Dict[str,str]: + """Parses a field path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/fields/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def index_path(project: str,database: str,collection: str,index: str,) -> str: + """Returns a fully-qualified index string.""" + return "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(project=project, database=database, collection=collection, index=index, ) + + @staticmethod + def parse_index_path(path: str) -> Dict[str,str]: + """Parses a index path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/indexes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def location_path(project: str,location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_location_path(path: str) -> Dict[str,str]: + """Parses a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return 
"billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} 
+ + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = FirestoreAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. 
+ + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + FirestoreAdminClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, FirestoreAdminTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firestore admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FirestoreAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = FirestoreAdminClient._read_environment_variables() + self._client_cert_source = FirestoreAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = FirestoreAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FirestoreAdminTransport) + if transport_provided: + # transport is a FirestoreAdminTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(FirestoreAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + FirestoreAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_index(self, + request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, + *, + parent: Optional[str] = None, + index: Optional[gfa_index.Index] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Creates a composite index. This returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the creation. The + metadata for the operation will be the type + [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_create_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateIndexRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]): + The request object. The request for + [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + index (google.cloud.firestore_admin_v1.types.Index): + Required. The composite index to + create. + + This corresponds to the ``index`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against + documents in a database. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, index]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.CreateIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.CreateIndexRequest): + request = firestore_admin.CreateIndexRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if index is not None: + request.index = index + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_index] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gfa_index.Index, + metadata_type=gfa_operation.IndexOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_indexes(self, + request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIndexesPager: + r"""Lists composite indexes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_list_indexes(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListIndexesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]): + The request object. The request for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager: + The response for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListIndexesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListIndexesRequest): + request = firestore_admin.ListIndexesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_indexes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListIndexesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_index(self, + request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> index.Index: + r"""Gets a composite index. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_get_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetIndexRequest( + name="name_value", + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]): + The request object. The request for + [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.firestore_admin_v1.types.Index: + Cloud Firestore indexes enable simple + and complex queries against documents in + a database. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetIndexRequest): + request = firestore_admin.GetIndexRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_index] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_index(self, + request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a composite index. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_delete_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteIndexRequest( + name="name_value", + ) + + # Make the request + client.delete_index(request=request) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]): + The request object. The request for + [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.DeleteIndexRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.DeleteIndexRequest): + request = firestore_admin.DeleteIndexRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_index] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_field(self, + request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> field.Field: + r"""Gets the metadata and configuration for a Field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_get_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetFieldRequest( + name="name_value", + ) + + # Make the request + response = client.get_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]): + The request object. The request for + [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.Field: + Represents a single field in the + database. + Fields are grouped by their "Collection + Group", which represent all collections + in the database with the same id. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetFieldRequest): + request = firestore_admin.GetFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_field(self, + request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, + *, + field: Optional[gfa_field.Field] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Updates a field configuration. Currently, field updates apply + only to single field index configuration. However, calls to + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] + should provide a field mask to avoid changing any configuration + that the caller isn't aware of. 
The field mask should be + specified as: ``{ paths: "index_config" }``. + + This call returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the field update. The + metadata for the operation will be the type + [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + + To configure the default field settings for the database, use + the special ``Field`` with resource name: + ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_update_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + field = firestore_admin_v1.Field() + field.name = "name_value" + + request = firestore_admin_v1.UpdateFieldRequest( + field=field, + ) + + # Make the request + operation = client.update_field(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]): + The request object. The request for + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. + field (google.cloud.firestore_admin_v1.types.Field): + Required. The field to be updated. + This corresponds to the ``field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Field` + Represents a single field in the database. + + Fields are grouped by their "Collection Group", which + represent all collections in the database with the + same id. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([field]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.UpdateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.UpdateFieldRequest): + request = firestore_admin.UpdateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if field is not None: + request.field = field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("field.name", request.field.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gfa_field.Field, + metadata_type=gfa_operation.FieldOperationMetadata, + ) + + # Done; return the response. + return response + + def list_fields(self, + request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListFieldsPager: + r"""Lists the field configuration and metadata for this database. + + Currently, + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + only supports listing fields that have been explicitly + overridden. To issue this query, call + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + with the filter set to ``indexConfig.usesAncestorConfig:false`` + or ``ttlConfig:*``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_list_fields(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListFieldsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_fields(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]): + The request object. The request for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager: + The response for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListFieldsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListFieldsRequest): + request = firestore_admin.ListFieldsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_fields] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFieldsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def export_documents(self, + request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Exports a copy of all or a subset of documents from + Google Cloud Firestore to another storage system, such + as Google Cloud Storage. 
Recent updates to documents may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + For more details on export behavior and output format, + refer to: + + https://cloud.google.com/firestore/docs/manage-data/export-import + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_export_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ExportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.export_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]): + The request object. The request for + [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. + name (str): + Required. Database to export. Should be of the form: + ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] + response field. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ExportDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ExportDocumentsRequest): + request = firestore_admin.ExportDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gfa_operation.ExportDocumentsResponse, + metadata_type=gfa_operation.ExportDocumentsMetadata, + ) + + # Done; return the response. + return response + + def import_documents(self, + request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Imports documents into Google Cloud Firestore. + Existing documents with the same name are overwritten. + The import occurs in the background and its progress can + be monitored and managed via the Operation resource that + is created. If an ImportDocuments operation is + cancelled, it is possible that a subset of the data has + already been imported to Cloud Firestore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_import_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ImportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]): + The request object. The request for + [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. + name (str): + Required. Database to import into. Should be of the + form: ``projects/{project_id}/databases/{database_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ImportDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ImportDocumentsRequest): + request = firestore_admin.ImportDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=gfa_operation.ImportDocumentsMetadata, + ) + + # Done; return the response. 
+ return response + + def create_database(self, + request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[gfa_database.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Create a database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_create_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]): + The request object. The request for + [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. + parent (str): + Required. A parent name of the form + ``projects/{project_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (google.cloud.firestore_admin_v1.types.Database): + Required. The Database to create. 
+ This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. The ID to use for the database, which will + become the final component of the database's resource + name. + + This value should be 4-63 characters. Valid characters + are /[a-z][0-9]-/ with first character a letter and the + last a letter or a number. Must not be UUID-like + /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. + + "(default)" database id is also valid. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Database` + A Cloud Firestore Database. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database, database_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.CreateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, firestore_admin.CreateDatabaseRequest): + request = firestore_admin.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gfa_database.Database, + metadata_type=firestore_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. + return response + + def get_database(self, + request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> database.Database: + r"""Gets information about a database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_get_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]): + The request object. The request for + [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.Database: + A Cloud Firestore Database. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, firestore_admin.GetDatabaseRequest): + request = firestore_admin.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_databases(self, + request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore_admin.ListDatabasesResponse: + r"""List all the databases in the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_list_databases(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_databases(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]): + The request object. A request to list the Firestore + Databases in all locations for a + project. + parent (str): + Required. A parent name of the form + ``projects/{project_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.ListDatabasesResponse: + The list of databases for a project. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListDatabasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, firestore_admin.ListDatabasesRequest): + request = firestore_admin.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database(self, + request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[gfa_database.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Updates a database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_update_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateDatabaseRequest( + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]): + The request object. The request for + [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. + database (google.cloud.firestore_admin_v1.types.Database): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Database` + A Cloud Firestore Database. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([database, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.UpdateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.UpdateDatabaseRequest): + request = firestore_admin.UpdateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database.name", request.database.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + gfa_database.Database, + metadata_type=firestore_admin.UpdateDatabaseMetadata, + ) + + # Done; return the response. + return response + + def delete_database(self, + request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Deletes a database. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_delete_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]): + The request object. The request for + [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Database` + A Cloud Firestore Database. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.DeleteDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.DeleteDatabaseRequest): + request = firestore_admin.DeleteDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + database.Database, + metadata_type=firestore_admin.DeleteDatabaseMetadata, + ) + + # Done; return the response. + return response + + def get_backup(self, + request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.Backup: + r"""Gets information about a backup. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_get_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]): + The request object. The request for + [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. + name (str): + Required. Name of the backup to fetch. + + Format is + ``projects/{project}/locations/{location}/backups/{backup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.Backup: + A Backup of a Cloud Firestore + Database. + The backup contains all documents and + index configurations for the given + database at a specific point in time. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetBackupRequest): + request = firestore_admin.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backups(self, + request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore_admin.ListBackupsResponse: + r"""Lists all the backups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_list_backups(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_backups(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]): + The request object. The request for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. + parent (str): + Required. The location to list backups from. + + Format is ``projects/{project}/locations/{location}``. + Use ``{location} = '-'`` to list backups from all + locations for the given project. This allows listing + backups from a single location or from all locations. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.ListBackupsResponse: + The response for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListBackupsRequest): + request = firestore_admin.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup(self, + request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_delete_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + client.delete_backup(request=request) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]): + The request object. The request for + [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. + name (str): + Required. Name of the backup to delete. + + format is + ``projects/{project}/locations/{location}/backups/{backup}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, firestore_admin.DeleteBackupRequest): + request = firestore_admin.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def restore_database(self, + request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gac_operation.Operation: + r"""Creates a new database by restoring from an existing backup. + + The new database must be in the same cloud region or + multi-region location as the existing backup. This behaves + similar to + [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] + except instead of creating a new empty database, a new database + is created with the database type, index configuration, and + documents from an existing backup. + + The [long-running operation][google.longrunning.Operation] can + be used to track the progress of the restore, with the + Operation's [metadata][google.longrunning.Operation.metadata] + field type being the + [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + the [Database][google.firestore.admin.v1.Database] if the + restore was successful. 
The new database is not readable or + writeable until the LRO has completed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_restore_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.RestoreDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + backup="backup_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]): + The request object. The request message for + [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.firestore_admin_v1.types.Database` + A Cloud Firestore Database. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.RestoreDatabaseRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.RestoreDatabaseRequest): + request = firestore_admin.RestoreDatabaseRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = gac_operation.from_gapic( + response, + self._transport.operations_client, + database.Database, + metadata_type=gfa_operation.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + def create_backup_schedule(self, + request: Optional[Union[firestore_admin.CreateBackupScheduleRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[schedule.BackupSchedule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.BackupSchedule: + r"""Creates a backup schedule on a database. + At most two backup schedules can be configured on a + database, one daily backup schedule and one weekly + backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_create_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateBackupScheduleRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]): + The request object. The request for + [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. + parent (str): + Required. The parent database. + + Format ``projects/{project}/databases/{database}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): + Required. The backup schedule to + create. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_schedule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.CreateBackupScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.CreateBackupScheduleRequest): + request = firestore_admin.CreateBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_schedule is not None: + request.backup_schedule = backup_schedule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup_schedule(self, + request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.BackupSchedule: + r"""Gets information about a backup schedule. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_get_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]): + The request object. The request for + [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. + name (str): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetBackupScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetBackupScheduleRequest): + request = firestore_admin.GetBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_schedules(self, + request: Optional[Union[firestore_admin.ListBackupSchedulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> firestore_admin.ListBackupSchedulesResponse: + r"""List backup schedules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_list_backup_schedules(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_backup_schedules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]): + The request object. The request for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + parent (str): + Required. The parent database. + + Format is ``projects/{project}/databases/{database}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: + The response for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListBackupSchedulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListBackupSchedulesRequest): + request = firestore_admin.ListBackupSchedulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_schedule(self, + request: Optional[Union[firestore_admin.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_update_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]): + The request object. The request for + [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. + backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): + Required. The backup schedule to + update. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.firestore_admin_v1.types.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_schedule, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.UpdateBackupScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.UpdateBackupScheduleRequest): + request = firestore_admin.UpdateBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup_schedule.name", request.backup_schedule.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_schedule(self, + request: Optional[Union[firestore_admin.DeleteBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_admin_v1 + + def sample_delete_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + Args: + request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]): + The request object. The request for + [FirestoreAdmin.DeleteBackupSchedules][]. + name (str): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.DeleteBackupScheduleRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.DeleteBackupScheduleRequest): + request = firestore_admin.DeleteBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "FirestoreAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirestoreAdminClient", +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py new file mode 100644 index 0000000000..65a4663eee --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index + + +class ListIndexesPager: + """A pager for iterating through ``list_indexes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``indexes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListIndexes`` requests and continue to iterate + through the ``indexes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., firestore_admin.ListIndexesResponse], + request: firestore_admin.ListIndexesRequest, + response: firestore_admin.ListIndexesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): + The initial request object. + response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = firestore_admin.ListIndexesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firestore_admin.ListIndexesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[index.Index]: + for page in self.pages: + yield from page.indexes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListIndexesAsyncPager: + """A pager for iterating through ``list_indexes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``indexes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListIndexes`` requests and continue to iterate + through the ``indexes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[firestore_admin.ListIndexesResponse]], + request: firestore_admin.ListIndexesRequest, + response: firestore_admin.ListIndexesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): + The initial request object. 
+ response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore_admin.ListIndexesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[firestore_admin.ListIndexesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[index.Index]: + async def async_generator(): + async for page in self.pages: + for response in page.indexes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListFieldsPager: + """A pager for iterating through ``list_fields`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``fields`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFields`` requests and continue to iterate + through the ``fields`` field on the + corresponding responses. + + All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., firestore_admin.ListFieldsResponse], + request: firestore_admin.ListFieldsRequest, + response: firestore_admin.ListFieldsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): + The initial request object. + response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore_admin.ListFieldsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firestore_admin.ListFieldsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[field.Field]: + for page in self.pages: + yield from page.fields + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListFieldsAsyncPager: + """A pager for iterating through ``list_fields`` requests. + + This class thinly wraps an initial + :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``fields`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFields`` requests and continue to iterate + through the ``fields`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[firestore_admin.ListFieldsResponse]], + request: firestore_admin.ListFieldsRequest, + response: firestore_admin.ListFieldsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): + The initial request object. + response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = firestore_admin.ListFieldsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[firestore_admin.ListFieldsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[field.Field]: + async def async_generator(): + async for page in self.pages: + for response in page.fields: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py 
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import FirestoreAdminTransport
from .grpc import FirestoreAdminGrpcTransport
from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport
from .rest import FirestoreAdminRestTransport, FirestoreAdminRestInterceptor


# Registry mapping each supported transport name to its implementation,
# in the order the names should be considered.
_transport_registry: Dict[str, Type[FirestoreAdminTransport]] = OrderedDict(
    (
        ('grpc', FirestoreAdminGrpcTransport),
        ('grpc_asyncio', FirestoreAdminGrpcAsyncIOTransport),
        ('rest', FirestoreAdminRestTransport),
    )
)

__all__ = (
    'FirestoreAdminTransport',
    'FirestoreAdminGrpcTransport',
    'FirestoreAdminGrpcAsyncIOTransport',
    'FirestoreAdminRestTransport',
    'FirestoreAdminRestInterceptor',
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.firestore_admin_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.firestore_admin_v1.types import backup
from google.cloud.firestore_admin_v1.types import database
from google.cloud.firestore_admin_v1.types import field
from google.cloud.firestore_admin_v1.types import firestore_admin
from google.cloud.firestore_admin_v1.types import index
from google.cloud.firestore_admin_v1.types import schedule
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


def _default_retry() -> retries.Retry:
    """Return the standard retry policy shared by the retryable admin RPCs.

    Extracted to avoid repeating the identical policy for each wrapped
    method: exponential backoff starting at 0.1s, capped at 60s, with a
    1.3 multiplier, retrying DEADLINE_EXCEEDED / INTERNAL / UNAVAILABLE,
    and an overall 60s deadline.
    """
    return retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )


class FirestoreAdminTransport(abc.ABC):
    """Abstract transport class for FirestoreAdmin.

    Concrete subclasses (gRPC, gRPC-asyncio, REST) supply the actual RPC
    callables via the properties below; this base class handles credential
    resolution and per-method retry/timeout wrapping.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/datastore',
    )

    DEFAULT_HOST: str = 'firestore.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'firestore.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for GDC-H
                credentials; falls back to ``host`` when not provided.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        """The hostname (including port) this transport connects to."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods. Idempotent read-style RPCs share
        # the standard retry policy from _default_retry(); mutating and
        # long-running RPCs are not retried.
        self._wrapped_methods = {
            self.create_index: gapic_v1.method.wrap_method(
                self.create_index,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_indexes: gapic_v1.method.wrap_method(
                self.list_indexes,
                default_retry=_default_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_index: gapic_v1.method.wrap_method(
                self.get_index,
                default_retry=_default_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_index: gapic_v1.method.wrap_method(
                self.delete_index,
                default_retry=_default_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_field: gapic_v1.method.wrap_method(
                self.get_field,
                default_retry=_default_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_field: gapic_v1.method.wrap_method(
                self.update_field,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_fields: gapic_v1.method.wrap_method(
                self.list_fields,
                default_retry=_default_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.export_documents: gapic_v1.method.wrap_method(
                self.export_documents,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.import_documents: gapic_v1.method.wrap_method(
                self.import_documents,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.create_database: gapic_v1.method.wrap_method(
                self.create_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_database: gapic_v1.method.wrap_method(
                self.get_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_databases: gapic_v1.method.wrap_method(
                self.list_databases,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_database: gapic_v1.method.wrap_method(
                self.update_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_database: gapic_v1.method.wrap_method(
                self.delete_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_backup: gapic_v1.method.wrap_method(
                self.get_backup,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_backups: gapic_v1.method.wrap_method(
                self.list_backups,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_backup: gapic_v1.method.wrap_method(
                self.delete_backup,
                default_timeout=None,
                client_info=client_info,
            ),
            self.restore_database: gapic_v1.method.wrap_method(
                self.restore_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_backup_schedule: gapic_v1.method.wrap_method(
                self.create_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_backup_schedule: gapic_v1.method.wrap_method(
                self.get_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_backup_schedules: gapic_v1.method.wrap_method(
                self.list_backup_schedules,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_backup_schedule: gapic_v1.method.wrap_method(
                self.update_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_backup_schedule: gapic_v1.method.wrap_method(
                self.delete_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def create_index(self) -> Callable[
            [firestore_admin.CreateIndexRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list_indexes(self) -> Callable[
            [firestore_admin.ListIndexesRequest],
            Union[
                firestore_admin.ListIndexesResponse,
                Awaitable[firestore_admin.ListIndexesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_index(self) -> Callable[
            [firestore_admin.GetIndexRequest],
            Union[
                index.Index,
                Awaitable[index.Index]
            ]]:
        raise NotImplementedError()

    @property
    def delete_index(self) -> Callable[
            [firestore_admin.DeleteIndexRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def get_field(self) -> Callable[
            [firestore_admin.GetFieldRequest],
            Union[
                field.Field,
                Awaitable[field.Field]
            ]]:
        raise NotImplementedError()

    @property
    def update_field(self) -> Callable[
            [firestore_admin.UpdateFieldRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list_fields(self) -> Callable[
            [firestore_admin.ListFieldsRequest],
            Union[
                firestore_admin.ListFieldsResponse,
                Awaitable[firestore_admin.ListFieldsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def export_documents(self) -> Callable[
            [firestore_admin.ExportDocumentsRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def import_documents(self) -> Callable[
            [firestore_admin.ImportDocumentsRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def create_database(self) -> Callable[
            [firestore_admin.CreateDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_database(self) -> Callable[
            [firestore_admin.GetDatabaseRequest],
            Union[
                database.Database,
                Awaitable[database.Database]
            ]]:
        raise NotImplementedError()

    @property
    def list_databases(self) -> Callable[
            [firestore_admin.ListDatabasesRequest],
            Union[
                firestore_admin.ListDatabasesResponse,
                Awaitable[firestore_admin.ListDatabasesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def update_database(self) -> Callable[
            [firestore_admin.UpdateDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_database(self) -> Callable[
            [firestore_admin.DeleteDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_backup(self) -> Callable[
            [firestore_admin.GetBackupRequest],
            Union[
                backup.Backup,
                Awaitable[backup.Backup]
            ]]:
        raise NotImplementedError()

    @property
    def list_backups(self) -> Callable[
            [firestore_admin.ListBackupsRequest],
            Union[
                firestore_admin.ListBackupsResponse,
                Awaitable[firestore_admin.ListBackupsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def delete_backup(self) -> Callable[
            [firestore_admin.DeleteBackupRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def restore_database(self) -> Callable[
            [firestore_admin.RestoreDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def create_backup_schedule(self) -> Callable[
            [firestore_admin.CreateBackupScheduleRequest],
            Union[
                schedule.BackupSchedule,
                Awaitable[schedule.BackupSchedule]
            ]]:
        raise NotImplementedError()

    @property
    def get_backup_schedule(self) -> Callable[
            [firestore_admin.GetBackupScheduleRequest],
            Union[
                schedule.BackupSchedule,
                Awaitable[schedule.BackupSchedule]
            ]]:
        raise NotImplementedError()

    @property
    def list_backup_schedules(self) -> Callable[
            [firestore_admin.ListBackupSchedulesRequest],
            Union[
                firestore_admin.ListBackupSchedulesResponse,
                Awaitable[firestore_admin.ListBackupSchedulesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def update_backup_schedule(self) -> Callable[
            [firestore_admin.UpdateBackupScheduleRequest],
            Union[
                schedule.BackupSchedule,
                Awaitable[schedule.BackupSchedule]
            ]]:
        raise NotImplementedError()

    @property
    def delete_backup_schedule(self) -> Callable[
            [firestore_admin.DeleteBackupScheduleRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[
        [operations_pb2.CancelOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def delete_operation(
        self,
    ) -> Callable[
        [operations_pb2.DeleteOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        """A short string identifying the concrete transport ('grpc', 'rest', ...)."""
        raise NotImplementedError()


__all__ = (
    'FirestoreAdminTransport',
)
b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py new file mode 100644 index 0000000000..284a6cfe16 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -0,0 +1,1032 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO + + +class 
FirestoreAdminGrpcTransport(FirestoreAdminTransport): + """gRPC backend transport for FirestoreAdmin. + + The Cloud Firestore Admin API. + + This API provides several administrative services for Cloud + Firestore. + + Project, Database, Namespace, Collection, Collection Group, and + Document are used as defined in the Google Cloud Firestore API. + + Operation: An Operation represents work being performed in the + background. + + The index service manages Cloud Firestore indexes. + + Index creation is performed asynchronously. An Operation resource is + created for each such asynchronous operation. The state of the + operation (including any errors encountered) may be queried via the + Operation resource. + + The Operations collection provides a record of actions performed for + the specified Project (including any Operations in progress). + Operations are not created directly but through calls on other + collections or resources. + + An Operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. Operations are garbage + collected after 30 days. By default, ListOperations will only return + in progress and failed operations. To list completed operation, + issue a ListOperations request with the filter ``done: true``. + + Operations are created by service ``FirestoreAdmin``, but are + accessed via service ``google.longrunning.Operations``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_index(self) -> Callable[ + [firestore_admin.CreateIndexRequest], + operations_pb2.Operation]: + r"""Return a callable for the create index method over gRPC. + + Creates a composite index. This returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the creation. The + metadata for the operation will be the type + [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + + Returns: + Callable[[~.CreateIndexRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_index' not in self._stubs: + self._stubs['create_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateIndex', + request_serializer=firestore_admin.CreateIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_index'] + + @property + def list_indexes(self) -> Callable[ + [firestore_admin.ListIndexesRequest], + firestore_admin.ListIndexesResponse]: + r"""Return a callable for the list indexes method over gRPC. + + Lists composite indexes. + + Returns: + Callable[[~.ListIndexesRequest], + ~.ListIndexesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_indexes' not in self._stubs: + self._stubs['list_indexes'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListIndexes', + request_serializer=firestore_admin.ListIndexesRequest.serialize, + response_deserializer=firestore_admin.ListIndexesResponse.deserialize, + ) + return self._stubs['list_indexes'] + + @property + def get_index(self) -> Callable[ + [firestore_admin.GetIndexRequest], + index.Index]: + r"""Return a callable for the get index method over gRPC. + + Gets a composite index. + + Returns: + Callable[[~.GetIndexRequest], + ~.Index]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_index' not in self._stubs: + self._stubs['get_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetIndex', + request_serializer=firestore_admin.GetIndexRequest.serialize, + response_deserializer=index.Index.deserialize, + ) + return self._stubs['get_index'] + + @property + def delete_index(self) -> Callable[ + [firestore_admin.DeleteIndexRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete index method over gRPC. + + Deletes a composite index. + + Returns: + Callable[[~.DeleteIndexRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_index' not in self._stubs: + self._stubs['delete_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex', + request_serializer=firestore_admin.DeleteIndexRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_index'] + + @property + def get_field(self) -> Callable[ + [firestore_admin.GetFieldRequest], + field.Field]: + r"""Return a callable for the get field method over gRPC. + + Gets the metadata and configuration for a Field. + + Returns: + Callable[[~.GetFieldRequest], + ~.Field]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_field' not in self._stubs: + self._stubs['get_field'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetField', + request_serializer=firestore_admin.GetFieldRequest.serialize, + response_deserializer=field.Field.deserialize, + ) + return self._stubs['get_field'] + + @property + def update_field(self) -> Callable[ + [firestore_admin.UpdateFieldRequest], + operations_pb2.Operation]: + r"""Return a callable for the update field method over gRPC. + + Updates a field configuration. Currently, field updates apply + only to single field index configuration. However, calls to + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] + should provide a field mask to avoid changing any configuration + that the caller isn't aware of. The field mask should be + specified as: ``{ paths: "index_config" }``. + + This call returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the field update. 
The + metadata for the operation will be the type + [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + + To configure the default field settings for the database, use + the special ``Field`` with resource name: + ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. + + Returns: + Callable[[~.UpdateFieldRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_field' not in self._stubs: + self._stubs['update_field'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateField', + request_serializer=firestore_admin.UpdateFieldRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_field'] + + @property + def list_fields(self) -> Callable[ + [firestore_admin.ListFieldsRequest], + firestore_admin.ListFieldsResponse]: + r"""Return a callable for the list fields method over gRPC. + + Lists the field configuration and metadata for this database. + + Currently, + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + only supports listing fields that have been explicitly + overridden. To issue this query, call + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + with the filter set to ``indexConfig.usesAncestorConfig:false`` + or ``ttlConfig:*``. + + Returns: + Callable[[~.ListFieldsRequest], + ~.ListFieldsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_fields' not in self._stubs: + self._stubs['list_fields'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListFields', + request_serializer=firestore_admin.ListFieldsRequest.serialize, + response_deserializer=firestore_admin.ListFieldsResponse.deserialize, + ) + return self._stubs['list_fields'] + + @property + def export_documents(self) -> Callable[ + [firestore_admin.ExportDocumentsRequest], + operations_pb2.Operation]: + r"""Return a callable for the export documents method over gRPC. + + Exports a copy of all or a subset of documents from + Google Cloud Firestore to another storage system, such + as Google Cloud Storage. Recent updates to documents may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + For more details on export behavior and output format, + refer to: + + https://cloud.google.com/firestore/docs/manage-data/export-import + + Returns: + Callable[[~.ExportDocumentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'export_documents' not in self._stubs: + self._stubs['export_documents'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments', + request_serializer=firestore_admin.ExportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_documents'] + + @property + def import_documents(self) -> Callable[ + [firestore_admin.ImportDocumentsRequest], + operations_pb2.Operation]: + r"""Return a callable for the import documents method over gRPC. + + Imports documents into Google Cloud Firestore. + Existing documents with the same name are overwritten. + The import occurs in the background and its progress can + be monitored and managed via the Operation resource that + is created. If an ImportDocuments operation is + cancelled, it is possible that a subset of the data has + already been imported to Cloud Firestore. + + Returns: + Callable[[~.ImportDocumentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_documents' not in self._stubs: + self._stubs['import_documents'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments', + request_serializer=firestore_admin.ImportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_documents'] + + @property + def create_database(self) -> Callable[ + [firestore_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the create database method over gRPC. + + Create a database. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_database' not in self._stubs: + self._stubs['create_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase', + request_serializer=firestore_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [firestore_admin.GetDatabaseRequest], + database.Database]: + r"""Return a callable for the get database method over gRPC. + + Gets information about a database. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database' not in self._stubs: + self._stubs['get_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetDatabase', + request_serializer=firestore_admin.GetDatabaseRequest.serialize, + response_deserializer=database.Database.deserialize, + ) + return self._stubs['get_database'] + + @property + def list_databases(self) -> Callable[ + [firestore_admin.ListDatabasesRequest], + firestore_admin.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + List all the databases in the project. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListDatabases', + request_serializer=firestore_admin.ListDatabasesRequest.serialize, + response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def update_database(self) -> Callable[ + [firestore_admin.UpdateDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the update database method over gRPC. + + Updates a database. + + Returns: + Callable[[~.UpdateDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database' not in self._stubs: + self._stubs['update_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase', + request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database'] + + @property + def delete_database(self) -> Callable[ + [firestore_admin.DeleteDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete database method over gRPC. + + Deletes a database. + + Returns: + Callable[[~.DeleteDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_database' not in self._stubs: + self._stubs['delete_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase', + request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_database'] + + @property + def get_backup(self) -> Callable[ + [firestore_admin.GetBackupRequest], + backup.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets information about a backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetBackup', + request_serializer=firestore_admin.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def list_backups(self) -> Callable[ + [firestore_admin.ListBackupsRequest], + firestore_admin.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists all the backups. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_backups' not in self._stubs: + self._stubs['list_backups'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListBackups', + request_serializer=firestore_admin.ListBackupsRequest.serialize, + response_deserializer=firestore_admin.ListBackupsResponse.deserialize, + ) + return self._stubs['list_backups'] + + @property + def delete_backup(self) -> Callable[ + [firestore_admin.DeleteBackupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup', + request_serializer=firestore_admin.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def restore_database(self) -> Callable[ + [firestore_admin.RestoreDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the restore database method over gRPC. + + Creates a new database by restoring from an existing backup. + + The new database must be in the same cloud region or + multi-region location as the existing backup. This behaves + similar to + [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] + except instead of creating a new empty database, a new database + is created with the database type, index configuration, and + documents from an existing backup. 
+ + The [long-running operation][google.longrunning.Operation] can + be used to track the progress of the restore, with the + Operation's [metadata][google.longrunning.Operation.metadata] + field type being the + [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + the [Database][google.firestore.admin.v1.Database] if the + restore was successful. The new database is not readable or + writeable until the LRO has completed. + + Returns: + Callable[[~.RestoreDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restore_database' not in self._stubs: + self._stubs['restore_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase', + request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restore_database'] + + @property + def create_backup_schedule(self) -> Callable[ + [firestore_admin.CreateBackupScheduleRequest], + schedule.BackupSchedule]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a backup schedule on a database. + At most two backup schedules can be configured on a + database, one daily backup schedule and one weekly + backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_backup_schedule' not in self._stubs: + self._stubs['create_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule', + request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['create_backup_schedule'] + + @property + def get_backup_schedule(self) -> Callable[ + [firestore_admin.GetBackupScheduleRequest], + schedule.BackupSchedule]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets information about a backup schedule. + + Returns: + Callable[[~.GetBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup_schedule' not in self._stubs: + self._stubs['get_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule', + request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['get_backup_schedule'] + + @property + def list_backup_schedules(self) -> Callable[ + [firestore_admin.ListBackupSchedulesRequest], + firestore_admin.ListBackupSchedulesResponse]: + r"""Return a callable for the list backup schedules method over gRPC. + + List backup schedules. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + ~.ListBackupSchedulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_backup_schedules' not in self._stubs: + self._stubs['list_backup_schedules'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules', + request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, + response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs['list_backup_schedules'] + + @property + def update_backup_schedule(self) -> Callable[ + [firestore_admin.UpdateBackupScheduleRequest], + schedule.BackupSchedule]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup_schedule' not in self._stubs: + self._stubs['update_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule', + request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['update_backup_schedule'] + + @property + def delete_backup_schedule(self) -> Callable[ + [firestore_admin.DeleteBackupScheduleRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_backup_schedule' not in self._stubs: + self._stubs['delete_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule', + request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup_schedule'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'FirestoreAdminGrpcTransport', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..edfc17104c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -0,0 +1,1031 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import FirestoreAdminGrpcTransport + + +class FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport): + """gRPC AsyncIO backend transport for FirestoreAdmin. + + The Cloud Firestore Admin API. + + This API provides several administrative services for Cloud + Firestore. + + Project, Database, Namespace, Collection, Collection Group, and + Document are used as defined in the Google Cloud Firestore API. 
+ + Operation: An Operation represents work being performed in the + background. + + The index service manages Cloud Firestore indexes. + + Index creation is performed asynchronously. An Operation resource is + created for each such asynchronous operation. The state of the + operation (including any errors encountered) may be queried via the + Operation resource. + + The Operations collection provides a record of actions performed for + the specified Project (including any Operations in progress). + Operations are not created directly but through calls on other + collections or resources. + + An Operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. Operations are garbage + collected after 30 days. By default, ListOperations will only return + in progress and failed operations. To list completed operation, + issue a ListOperations request with the filter ``done: true``. + + Operations are created by service ``FirestoreAdmin``, but are + accessed via service ``google.longrunning.Operations``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_index(self) -> Callable[ + [firestore_admin.CreateIndexRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create index method over gRPC. + + Creates a composite index. This returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the creation. The + metadata for the operation will be the type + [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + + Returns: + Callable[[~.CreateIndexRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_index' not in self._stubs: + self._stubs['create_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateIndex', + request_serializer=firestore_admin.CreateIndexRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_index'] + + @property + def list_indexes(self) -> Callable[ + [firestore_admin.ListIndexesRequest], + Awaitable[firestore_admin.ListIndexesResponse]]: + r"""Return a callable for the list indexes method over gRPC. + + Lists composite indexes. + + Returns: + Callable[[~.ListIndexesRequest], + Awaitable[~.ListIndexesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_indexes' not in self._stubs: + self._stubs['list_indexes'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListIndexes', + request_serializer=firestore_admin.ListIndexesRequest.serialize, + response_deserializer=firestore_admin.ListIndexesResponse.deserialize, + ) + return self._stubs['list_indexes'] + + @property + def get_index(self) -> Callable[ + [firestore_admin.GetIndexRequest], + Awaitable[index.Index]]: + r"""Return a callable for the get index method over gRPC. + + Gets a composite index. + + Returns: + Callable[[~.GetIndexRequest], + Awaitable[~.Index]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_index' not in self._stubs: + self._stubs['get_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetIndex', + request_serializer=firestore_admin.GetIndexRequest.serialize, + response_deserializer=index.Index.deserialize, + ) + return self._stubs['get_index'] + + @property + def delete_index(self) -> Callable[ + [firestore_admin.DeleteIndexRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete index method over gRPC. + + Deletes a composite index. + + Returns: + Callable[[~.DeleteIndexRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_index' not in self._stubs: + self._stubs['delete_index'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex', + request_serializer=firestore_admin.DeleteIndexRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_index'] + + @property + def get_field(self) -> Callable[ + [firestore_admin.GetFieldRequest], + Awaitable[field.Field]]: + r"""Return a callable for the get field method over gRPC. + + Gets the metadata and configuration for a Field. + + Returns: + Callable[[~.GetFieldRequest], + Awaitable[~.Field]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_field' not in self._stubs: + self._stubs['get_field'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetField', + request_serializer=firestore_admin.GetFieldRequest.serialize, + response_deserializer=field.Field.deserialize, + ) + return self._stubs['get_field'] + + @property + def update_field(self) -> Callable[ + [firestore_admin.UpdateFieldRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update field method over gRPC. + + Updates a field configuration. Currently, field updates apply + only to single field index configuration. However, calls to + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] + should provide a field mask to avoid changing any configuration + that the caller isn't aware of. The field mask should be + specified as: ``{ paths: "index_config" }``. + + This call returns a + [google.longrunning.Operation][google.longrunning.Operation] + which may be used to track the status of the field update. 
The + metadata for the operation will be the type + [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + + To configure the default field settings for the database, use + the special ``Field`` with resource name: + ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. + + Returns: + Callable[[~.UpdateFieldRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_field' not in self._stubs: + self._stubs['update_field'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateField', + request_serializer=firestore_admin.UpdateFieldRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_field'] + + @property + def list_fields(self) -> Callable[ + [firestore_admin.ListFieldsRequest], + Awaitable[firestore_admin.ListFieldsResponse]]: + r"""Return a callable for the list fields method over gRPC. + + Lists the field configuration and metadata for this database. + + Currently, + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + only supports listing fields that have been explicitly + overridden. To issue this query, call + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + with the filter set to ``indexConfig.usesAncestorConfig:false`` + or ``ttlConfig:*``. + + Returns: + Callable[[~.ListFieldsRequest], + Awaitable[~.ListFieldsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_fields' not in self._stubs: + self._stubs['list_fields'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListFields', + request_serializer=firestore_admin.ListFieldsRequest.serialize, + response_deserializer=firestore_admin.ListFieldsResponse.deserialize, + ) + return self._stubs['list_fields'] + + @property + def export_documents(self) -> Callable[ + [firestore_admin.ExportDocumentsRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export documents method over gRPC. + + Exports a copy of all or a subset of documents from + Google Cloud Firestore to another storage system, such + as Google Cloud Storage. Recent updates to documents may + not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed + via the Operation resource that is created. The output + of an export may only be used once the associated + operation is done. If an export operation is cancelled + before completion it may leave partial data behind in + Google Cloud Storage. + + For more details on export behavior and output format, + refer to: + + https://cloud.google.com/firestore/docs/manage-data/export-import + + Returns: + Callable[[~.ExportDocumentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'export_documents' not in self._stubs: + self._stubs['export_documents'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments', + request_serializer=firestore_admin.ExportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_documents'] + + @property + def import_documents(self) -> Callable[ + [firestore_admin.ImportDocumentsRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import documents method over gRPC. + + Imports documents into Google Cloud Firestore. + Existing documents with the same name are overwritten. + The import occurs in the background and its progress can + be monitored and managed via the Operation resource that + is created. If an ImportDocuments operation is + cancelled, it is possible that a subset of the data has + already been imported to Cloud Firestore. + + Returns: + Callable[[~.ImportDocumentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_documents' not in self._stubs: + self._stubs['import_documents'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments', + request_serializer=firestore_admin.ImportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_documents'] + + @property + def create_database(self) -> Callable[ + [firestore_admin.CreateDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create database method over gRPC. + + Create a database. 
+ + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_database' not in self._stubs: + self._stubs['create_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase', + request_serializer=firestore_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [firestore_admin.GetDatabaseRequest], + Awaitable[database.Database]]: + r"""Return a callable for the get database method over gRPC. + + Gets information about a database. + + Returns: + Callable[[~.GetDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database' not in self._stubs: + self._stubs['get_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetDatabase', + request_serializer=firestore_admin.GetDatabaseRequest.serialize, + response_deserializer=database.Database.deserialize, + ) + return self._stubs['get_database'] + + @property + def list_databases(self) -> Callable[ + [firestore_admin.ListDatabasesRequest], + Awaitable[firestore_admin.ListDatabasesResponse]]: + r"""Return a callable for the list databases method over gRPC. + + List all the databases in the project. 
+ + Returns: + Callable[[~.ListDatabasesRequest], + Awaitable[~.ListDatabasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListDatabases', + request_serializer=firestore_admin.ListDatabasesRequest.serialize, + response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def update_database(self) -> Callable[ + [firestore_admin.UpdateDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update database method over gRPC. + + Updates a database. + + Returns: + Callable[[~.UpdateDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database' not in self._stubs: + self._stubs['update_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase', + request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database'] + + @property + def delete_database(self) -> Callable[ + [firestore_admin.DeleteDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete database method over gRPC. + + Deletes a database. 
+ + Returns: + Callable[[~.DeleteDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_database' not in self._stubs: + self._stubs['delete_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase', + request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_database'] + + @property + def get_backup(self) -> Callable[ + [firestore_admin.GetBackupRequest], + Awaitable[backup.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets information about a backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetBackup', + request_serializer=firestore_admin.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def list_backups(self) -> Callable[ + [firestore_admin.ListBackupsRequest], + Awaitable[firestore_admin.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists all the backups. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backups' not in self._stubs: + self._stubs['list_backups'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListBackups', + request_serializer=firestore_admin.ListBackupsRequest.serialize, + response_deserializer=firestore_admin.ListBackupsResponse.deserialize, + ) + return self._stubs['list_backups'] + + @property + def delete_backup(self) -> Callable[ + [firestore_admin.DeleteBackupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup', + request_serializer=firestore_admin.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def restore_database(self) -> Callable[ + [firestore_admin.RestoreDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore database method over gRPC. + + Creates a new database by restoring from an existing backup. + + The new database must be in the same cloud region or + multi-region location as the existing backup. 
This behaves + similar to + [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] + except instead of creating a new empty database, a new database + is created with the database type, index configuration, and + documents from an existing backup. + + The [long-running operation][google.longrunning.Operation] can + be used to track the progress of the restore, with the + Operation's [metadata][google.longrunning.Operation.metadata] + field type being the + [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + the [Database][google.firestore.admin.v1.Database] if the + restore was successful. The new database is not readable or + writeable until the LRO has completed. + + Returns: + Callable[[~.RestoreDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restore_database' not in self._stubs: + self._stubs['restore_database'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase', + request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restore_database'] + + @property + def create_backup_schedule(self) -> Callable[ + [firestore_admin.CreateBackupScheduleRequest], + Awaitable[schedule.BackupSchedule]]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a backup schedule on a database. + At most two backup schedules can be configured on a + database, one daily backup schedule and one weekly + backup schedule. 
+ + Returns: + Callable[[~.CreateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_backup_schedule' not in self._stubs: + self._stubs['create_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule', + request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['create_backup_schedule'] + + @property + def get_backup_schedule(self) -> Callable[ + [firestore_admin.GetBackupScheduleRequest], + Awaitable[schedule.BackupSchedule]]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets information about a backup schedule. + + Returns: + Callable[[~.GetBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup_schedule' not in self._stubs: + self._stubs['get_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule', + request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['get_backup_schedule'] + + @property + def list_backup_schedules(self) -> Callable[ + [firestore_admin.ListBackupSchedulesRequest], + Awaitable[firestore_admin.ListBackupSchedulesResponse]]: + r"""Return a callable for the list backup schedules method over gRPC. + + List backup schedules. 
+ + Returns: + Callable[[~.ListBackupSchedulesRequest], + Awaitable[~.ListBackupSchedulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_schedules' not in self._stubs: + self._stubs['list_backup_schedules'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules', + request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, + response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs['list_backup_schedules'] + + @property + def update_backup_schedule(self) -> Callable[ + [firestore_admin.UpdateBackupScheduleRequest], + Awaitable[schedule.BackupSchedule]]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup_schedule' not in self._stubs: + self._stubs['update_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule', + request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, + response_deserializer=schedule.BackupSchedule.deserialize, + ) + return self._stubs['update_backup_schedule'] + + @property + def delete_backup_schedule(self) -> Callable[ + [firestore_admin.DeleteBackupScheduleRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup schedule method over gRPC. 
+ + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup_schedule' not in self._stubs: + self._stubs['delete_backup_schedule'] = self.grpc_channel.unary_unary( + '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule', + request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup_schedule'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'FirestoreAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py new file mode 100644 index 0000000000..bc341d7348 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py @@ -0,0 +1,3178 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FirestoreAdminRestInterceptor: + """Interceptor for FirestoreAdmin. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FirestoreAdminRestTransport. + + .. code-block:: python + class MyCustomFirestoreAdminInterceptor(FirestoreAdminRestInterceptor): + def pre_create_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_index(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_export_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_export_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_field(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_field(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_index(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_index(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_schedules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_schedules(self, response): + logging.log(f"Received response: {response}") + return response + + 
def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_fields(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_fields(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_indexes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_indexes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_field(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_field(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FirestoreAdminRestTransport(interceptor=MyCustomFirestoreAdminInterceptor()) + client = FirestoreAdminClient(transport=transport) + + + """ + def pre_create_backup_schedule(self, request: firestore_admin.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[firestore_admin.CreateBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_create_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: + """Post-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_create_database(self, request: firestore_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.CreateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_database + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_create_index(self, request: firestore_admin.CreateIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_create_index(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_index + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_delete_backup(self, request: firestore_admin.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def pre_delete_backup_schedule(self, request: firestore_admin.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def pre_delete_database(self, request: firestore_admin.DeleteDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_delete_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_database + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_delete_index(self, request: firestore_admin.DeleteIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. 
+ """ + return request, metadata + + def pre_export_documents(self, request: firestore_admin.ExportDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ExportDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_export_documents(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for export_documents + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_get_backup(self, request: firestore_admin.GetBackupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_get_backup(self, response: backup.Backup) -> backup.Backup: + """Post-rpc interceptor for get_backup + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_get_backup_schedule(self, request: firestore_admin.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. 
+ """ + return request, metadata + + def post_get_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: + """Post-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_get_database(self, request: firestore_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_get_database(self, response: database.Database) -> database.Database: + """Post-rpc interceptor for get_database + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_get_field(self, request: firestore_admin.GetFieldRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetFieldRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_field + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_get_field(self, response: field.Field) -> field.Field: + """Post-rpc interceptor for get_field + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_get_index(self, request: firestore_admin.GetIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_index + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_get_index(self, response: index.Index) -> index.Index: + """Post-rpc interceptor for get_index + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_import_documents(self, request: firestore_admin.ImportDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ImportDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_import_documents(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for import_documents + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_list_backups(self, request: firestore_admin.ListBackupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. 
+ """ + return request, metadata + + def post_list_backups(self, response: firestore_admin.ListBackupsResponse) -> firestore_admin.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_list_backup_schedules(self, request: firestore_admin.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListBackupSchedulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_list_backup_schedules(self, response: firestore_admin.ListBackupSchedulesResponse) -> firestore_admin.ListBackupSchedulesResponse: + """Post-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_list_databases(self, request: firestore_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListDatabasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_list_databases(self, response: firestore_admin.ListDatabasesResponse) -> firestore_admin.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_list_fields(self, request: firestore_admin.ListFieldsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListFieldsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_fields + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_list_fields(self, response: firestore_admin.ListFieldsResponse) -> firestore_admin.ListFieldsResponse: + """Post-rpc interceptor for list_fields + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_list_indexes(self, request: firestore_admin.ListIndexesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_indexes + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_list_indexes(self, response: firestore_admin.ListIndexesResponse) -> firestore_admin.ListIndexesResponse: + """Post-rpc interceptor for list_indexes + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_restore_database(self, request: firestore_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.RestoreDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. 
+ """ + return request, metadata + + def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_database + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_update_backup_schedule(self, request: firestore_admin.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_update_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: + """Post-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_update_database(self, request: firestore_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_database + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_update_field(self, request: firestore_admin.UpdateFieldRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateFieldRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_field + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_update_field(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_field + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. 
+ """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirestoreAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the FirestoreAdmin server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class FirestoreAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FirestoreAdminRestInterceptor + + +class FirestoreAdminRestTransport(FirestoreAdminTransport): + """REST backend transport for FirestoreAdmin. + + The Cloud Firestore Admin API. + + This API provides several administrative services for Cloud + Firestore. + + Project, Database, Namespace, Collection, Collection Group, and + Document are used as defined in the Google Cloud Firestore API. + + Operation: An Operation represents work being performed in the + background. + + The index service manages Cloud Firestore indexes. + + Index creation is performed asynchronously. An Operation resource is + created for each such asynchronous operation. The state of the + operation (including any errors encountered) may be queried via the + Operation resource. + + The Operations collection provides a record of actions performed for + the specified Project (including any Operations in progress). + Operations are not created directly but through calls on other + collections or resources. + + An Operation that is done may be deleted so that it is no longer + listed as part of the Operation collection. Operations are garbage + collected after 30 days. By default, ListOperations will only return + in progress and failed operations. To list completed operation, + issue a ListOperations request with the filter ``done: true``. + + Operations are created by service ``FirestoreAdmin``, but are + accessed via service ``google.longrunning.Operations``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'firestore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[FirestoreAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'firestore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FirestoreAdminRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateBackupSchedule(FirestoreAdminRestStub): + def __hash__(self): + return hash("CreateBackupSchedule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.CreateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> schedule.BackupSchedule: + r"""Call the create backup schedule method over HTTP. + + Args: + request (~.firestore_admin.CreateBackupScheduleRequest): + The request object. The request for + [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. 
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.schedule.BackupSchedule:
                    A backup schedule for a Cloud
                    Firestore Database.
                    This resource is owned by the database
                    it is backing up, and is deleted along
                    with the database. The actual backups
                    are not though.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{parent=projects/*/databases/*}/backupSchedules',
                'body': 'backup_schedule',
            },
            ]
            request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata)
            pb_request = firestore_admin.CreateBackupScheduleRequest.pb(request)
            # Split the proto request into URI path, body and query-string parts.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Serialize the request body to JSON.

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = schedule.BackupSchedule()
            pb_resp = schedule.BackupSchedule.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_create_backup_schedule(resp)
            return resp

    class _CreateDatabase(FirestoreAdminRestStub):
        """REST stub that sends the CreateDatabase RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("CreateDatabase")

        # JSON defaults for required query-string fields: database_id must
        # always be present in the query string, even when empty.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
            "databaseId" : "",
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.CreateDatabaseRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the create database method over HTTP.

            Args:
                request (~.firestore_admin.CreateDatabaseRequest):
                    The request object. The request for
                    [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{parent=projects/*}/databases',
                'body': 'database',
            },
            ]
            request, metadata = self._interceptor.pre_create_database(request, metadata)
            pb_request = firestore_admin.CreateDatabaseRequest.pb(request)
            # Split the proto request into URI path, body and query-string parts.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Serialize the request body to JSON.

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_create_database(resp)
            return resp

    class _CreateIndex(FirestoreAdminRestStub):
        """REST stub that sends the CreateIndex RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("CreateIndex")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.CreateIndexRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the create index method over HTTP.

            Args:
                request (~.firestore_admin.CreateIndexRequest):
                    The request object. The request for
                    [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes',
                'body': 'index',
            },
            ]
            request, metadata = self._interceptor.pre_create_index(request, metadata)
            pb_request = firestore_admin.CreateIndexRequest.pb(request)
            # Split the proto request into URI path, body and query-string parts.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Serialize the request body to JSON.

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_create_index(resp)
            return resp

    class _DeleteBackup(FirestoreAdminRestStub):
        """REST stub that sends the DeleteBackup RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("DeleteBackup")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.DeleteBackupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ):
            r"""Call the delete backup method over HTTP.

            Args:
                request (~.firestore_admin.DeleteBackupRequest):
                    The request object. The request for
                    [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/v1/{name=projects/*/locations/*/backups/*}',
            },
            ]
            request, metadata = self._interceptor.pre_delete_backup(request, metadata)
            pb_request = firestore_admin.DeleteBackupRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (DELETE carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

    class _DeleteBackupSchedule(FirestoreAdminRestStub):
        """REST stub that sends the DeleteBackupSchedule RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("DeleteBackupSchedule")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.DeleteBackupScheduleRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ):
            r"""Call the delete backup schedule method over HTTP.

            Args:
                request (~.firestore_admin.DeleteBackupScheduleRequest):
                    The request object. The request for
                    [FirestoreAdmin.DeleteBackupSchedules][].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/v1/{name=projects/*/databases/*/backupSchedules/*}',
            },
            ]
            request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata)
            pb_request = firestore_admin.DeleteBackupScheduleRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (DELETE carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

    class _DeleteDatabase(FirestoreAdminRestStub):
        """REST stub that sends the DeleteDatabase RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("DeleteDatabase")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.DeleteDatabaseRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the delete database method over HTTP.

            Args:
                request (~.firestore_admin.DeleteDatabaseRequest):
                    The request object. The request for
                    [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/v1/{name=projects/*/databases/*}',
            },
            ]
            request, metadata = self._interceptor.pre_delete_database(request, metadata)
            pb_request = firestore_admin.DeleteDatabaseRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (DELETE carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete_database(resp)
            return resp

    class _DeleteIndex(FirestoreAdminRestStub):
        """REST stub that sends the DeleteIndex RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("DeleteIndex")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.DeleteIndexRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ):
            r"""Call the delete index method over HTTP.

            Args:
                request (~.firestore_admin.DeleteIndexRequest):
                    The request object. The request for
                    [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}',
            },
            ]
            request, metadata = self._interceptor.pre_delete_index(request, metadata)
            pb_request = firestore_admin.DeleteIndexRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (DELETE carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

    class _ExportDocuments(FirestoreAdminRestStub):
        """REST stub that sends the ExportDocuments RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("ExportDocuments")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.ExportDocumentsRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the export documents method over HTTP.

            Args:
                request (~.firestore_admin.ExportDocumentsRequest):
                    The request object. The request for
                    [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{name=projects/*/databases/*}:exportDocuments',
                'body': '*',
            },
            ]
            request, metadata = self._interceptor.pre_export_documents(request, metadata)
            pb_request = firestore_admin.ExportDocumentsRequest.pb(request)
            # Split the proto request into URI path, body and query-string parts.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Serialize the request body to JSON.

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_export_documents(resp)
            return resp

    class _GetBackup(FirestoreAdminRestStub):
        """REST stub that sends the GetBackup RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("GetBackup")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.GetBackupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> backup.Backup:
            r"""Call the get backup method over HTTP.

            Args:
                request (~.firestore_admin.GetBackupRequest):
                    The request object. The request for
                    [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.backup.Backup:
                    A Backup of a Cloud Firestore
                    Database.
                    The backup contains all documents and
                    index configurations for the given
                    database at a specific point in time.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/locations/*/backups/*}',
            },
            ]
            request, metadata = self._interceptor.pre_get_backup(request, metadata)
            pb_request = firestore_admin.GetBackupRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (GET carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = backup.Backup()
            pb_resp = backup.Backup.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_backup(resp)
            return resp

    class _GetBackupSchedule(FirestoreAdminRestStub):
        """REST stub that sends the GetBackupSchedule RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("GetBackupSchedule")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.GetBackupScheduleRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> schedule.BackupSchedule:
            r"""Call the get backup schedule method over HTTP.

            Args:
                request (~.firestore_admin.GetBackupScheduleRequest):
                    The request object. The request for
                    [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.schedule.BackupSchedule:
                    A backup schedule for a Cloud
                    Firestore Database.
                    This resource is owned by the database
                    it is backing up, and is deleted along
                    with the database. The actual backups
                    are not though.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/databases/*/backupSchedules/*}',
            },
            ]
            request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata)
            pb_request = firestore_admin.GetBackupScheduleRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (GET carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = schedule.BackupSchedule()
            pb_resp = schedule.BackupSchedule.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_backup_schedule(resp)
            return resp

    class _GetDatabase(FirestoreAdminRestStub):
        """REST stub that sends the GetDatabase RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("GetDatabase")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.GetDatabaseRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> database.Database:
            r"""Call the get database method over HTTP.

            Args:
                request (~.firestore_admin.GetDatabaseRequest):
                    The request object. The request for
                    [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.database.Database:
                    A Cloud Firestore Database.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/databases/*}',
            },
            ]
            request, metadata = self._interceptor.pre_get_database(request, metadata)
            pb_request = firestore_admin.GetDatabaseRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (GET carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = database.Database()
            pb_resp = database.Database.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_database(resp)
            return resp

    class _GetField(FirestoreAdminRestStub):
        """REST stub that sends the GetField RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("GetField")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.GetFieldRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> field.Field:
            r"""Call the get field method over HTTP.

            Args:
                request (~.firestore_admin.GetFieldRequest):
                    The request object. The request for
                    [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.field.Field:
                    Represents a single field in the
                    database.
                    Fields are grouped by their "Collection
                    Group", which represent all collections
                    in the database with the same id.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}',
            },
            ]
            request, metadata = self._interceptor.pre_get_field(request, metadata)
            pb_request = firestore_admin.GetFieldRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (GET carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = field.Field()
            pb_resp = field.Field.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_field(resp)
            return resp

    class _GetIndex(FirestoreAdminRestStub):
        """REST stub that sends the GetIndex RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("GetIndex")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.GetIndexRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> index.Index:
            r"""Call the get index method over HTTP.

            Args:
                request (~.firestore_admin.GetIndexRequest):
                    The request object. The request for
                    [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.index.Index:
                    Cloud Firestore indexes enable simple
                    and complex queries against documents in
                    a database.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}',
            },
            ]
            request, metadata = self._interceptor.pre_get_index(request, metadata)
            pb_request = firestore_admin.GetIndexRequest.pb(request)
            # Split the proto request into URI path and query-string parts
            # (GET carries no body).
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Serialize the query params to JSON.
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON responses with enums encoded as integers.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = index.Index()
            pb_resp = index.Index.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_index(resp)
            return resp

    class _ImportDocuments(FirestoreAdminRestStub):
        """REST stub that sends the ImportDocuments RPC over HTTP/JSON."""

        def __hash__(self):
            # All instances of this stub hash identically, keyed by the RPC name.
            return hash("ImportDocuments")

        # JSON defaults for proto3 required query-string fields; none for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the default values for required fields absent from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: firestore_admin.ImportDocumentsRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the import documents method over HTTP.

            Args:
                request (~.firestore_admin.ImportDocumentsRequest):
                    The request object. The request for
                    [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/databases/*}:importDocuments', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_import_documents(request, metadata) + pb_request = firestore_admin.ImportDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_documents(resp) + return resp + + class _ListBackups(FirestoreAdminRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.ListBackupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore_admin.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.firestore_admin.ListBackupsRequest): + The request object. The request for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore_admin.ListBackupsResponse: + The response for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/backups', + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = firestore_admin.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore_admin.ListBackupsResponse() + pb_resp = firestore_admin.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupSchedules(FirestoreAdminRestStub): + def __hash__(self): + return hash("ListBackupSchedules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.ListBackupSchedulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore_admin.ListBackupSchedulesResponse: + r"""Call the list backup schedules method over HTTP. + + Args: + request (~.firestore_admin.ListBackupSchedulesRequest): + The request object. The request for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore_admin.ListBackupSchedulesResponse: + The response for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/databases/*}/backupSchedules', + }, + ] + request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) + pb_request = firestore_admin.ListBackupSchedulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore_admin.ListBackupSchedulesResponse() + pb_resp = firestore_admin.ListBackupSchedulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_schedules(resp) + return resp + + class _ListDatabases(FirestoreAdminRestStub): + def __hash__(self): + return hash("ListDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.ListDatabasesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore_admin.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.firestore_admin.ListDatabasesRequest): + The request object. A request to list the Firestore + Databases in all locations for a + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore_admin.ListDatabasesResponse: + The list of databases for a project. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/databases', + }, + ] + request, metadata = self._interceptor.pre_list_databases(request, metadata) + pb_request = firestore_admin.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore_admin.ListDatabasesResponse() + pb_resp = firestore_admin.ListDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_databases(resp) + return resp + + class _ListFields(FirestoreAdminRestStub): + def __hash__(self): + return hash("ListFields") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.ListFieldsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore_admin.ListFieldsResponse: + r"""Call the list fields method over HTTP. + + Args: + request (~.firestore_admin.ListFieldsRequest): + The request object. The request for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore_admin.ListFieldsResponse: + The response for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields', + }, + ] + request, metadata = self._interceptor.pre_list_fields(request, metadata) + pb_request = firestore_admin.ListFieldsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore_admin.ListFieldsResponse() + pb_resp = firestore_admin.ListFieldsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_fields(resp) + return resp + + class _ListIndexes(FirestoreAdminRestStub): + def __hash__(self): + return hash("ListIndexes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.ListIndexesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> firestore_admin.ListIndexesResponse: + r"""Call the list indexes method over HTTP. + + Args: + request (~.firestore_admin.ListIndexesRequest): + The request object. The request for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.firestore_admin.ListIndexesResponse: + The response for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes', + }, + ] + request, metadata = self._interceptor.pre_list_indexes(request, metadata) + pb_request = firestore_admin.ListIndexesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firestore_admin.ListIndexesResponse() + pb_resp = firestore_admin.ListIndexesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_indexes(resp) + return resp + + class _RestoreDatabase(FirestoreAdminRestStub): + def __hash__(self): + return hash("RestoreDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.RestoreDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the restore database method over HTTP. + + Args: + request (~.firestore_admin.RestoreDatabaseRequest): + The request object. The request message for + [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/databases:restore', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_restore_database(request, metadata) + pb_request = firestore_admin.RestoreDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_database(resp) + return resp + + class _UpdateBackupSchedule(FirestoreAdminRestStub): + def __hash__(self): + return hash("UpdateBackupSchedule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.UpdateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> schedule.BackupSchedule: + r"""Call the update backup schedule method over HTTP. + + Args: + request (~.firestore_admin.UpdateBackupScheduleRequest): + The request object. The request for + [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schedule.BackupSchedule: + A backup schedule for a Cloud + Firestore Database. + This resource is owned by the database + it is backing up, and is deleted along + with the database. The actual backups + are not though. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}', + 'body': 'backup_schedule', + }, + ] + request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) + pb_request = firestore_admin.UpdateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schedule.BackupSchedule() + pb_resp = schedule.BackupSchedule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_schedule(resp) + return resp + + class _UpdateDatabase(FirestoreAdminRestStub): + def __hash__(self): + return hash("UpdateDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.UpdateDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update database method over HTTP. + + Args: + request (~.firestore_admin.UpdateDatabaseRequest): + The request object. The request for + [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{database.name=projects/*/databases/*}', + 'body': 'database', + }, + ] + request, metadata = self._interceptor.pre_update_database(request, metadata) + pb_request = firestore_admin.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_database(resp) + return resp + + class _UpdateField(FirestoreAdminRestStub): + def __hash__(self): + return hash("UpdateField") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: firestore_admin.UpdateFieldRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update field method over HTTP. + + Args: + request (~.firestore_admin.UpdateFieldRequest): + The request object. The request for + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}', + 'body': 'field', + }, + ] + request, metadata = self._interceptor.pre_update_field(request, metadata) + pb_request = firestore_admin.UpdateFieldRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_field(resp) + return resp + + @property + def create_backup_schedule(self) -> Callable[ + [firestore_admin.CreateBackupScheduleRequest], + schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_database(self) -> Callable[ + [firestore_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_index(self) -> Callable[ + [firestore_admin.CreateIndexRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup(self) -> Callable[ + [firestore_admin.DeleteBackupRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_schedule(self) -> Callable[ + [firestore_admin.DeleteBackupScheduleRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_database(self) -> Callable[ + [firestore_admin.DeleteDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_index(self) -> Callable[ + [firestore_admin.DeleteIndexRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_documents(self) -> Callable[ + [firestore_admin.ExportDocumentsRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[ + [firestore_admin.GetBackupRequest], + backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_schedule(self) -> Callable[ + [firestore_admin.GetBackupScheduleRequest], + schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database(self) -> Callable[ + [firestore_admin.GetDatabaseRequest], + database.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_field(self) -> Callable[ + [firestore_admin.GetFieldRequest], + field.Field]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetField(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_index(self) -> Callable[ + [firestore_admin.GetIndexRequest], + index.Index]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIndex(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_documents(self) -> Callable[ + [firestore_admin.ImportDocumentsRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups(self) -> Callable[ + [firestore_admin.ListBackupsRequest], + firestore_admin.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_schedules(self) -> Callable[ + [firestore_admin.ListBackupSchedulesRequest], + firestore_admin.ListBackupSchedulesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_databases(self) -> Callable[ + [firestore_admin.ListDatabasesRequest], + firestore_admin.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_fields(self) -> Callable[ + [firestore_admin.ListFieldsRequest], + firestore_admin.ListFieldsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFields(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_indexes(self) -> Callable[ + [firestore_admin.ListIndexesRequest], + firestore_admin.ListIndexesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListIndexes(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_database(self) -> Callable[ + [firestore_admin.RestoreDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_schedule(self) -> Callable[ + [firestore_admin.UpdateBackupScheduleRequest], + schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database(self) -> Callable[ + [firestore_admin.UpdateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_field(self) -> Callable[ + [firestore_admin.UpdateFieldRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateField(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(FirestoreAdminRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(FirestoreAdminRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(FirestoreAdminRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(FirestoreAdminRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/databases/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'FirestoreAdminRestTransport', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py new file mode 100644 index 0000000000..ea202681cd --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .backup import ( + Backup, +) +from .database import ( + Database, +) +from .field import ( + Field, +) +from .firestore_admin import ( + CreateBackupScheduleRequest, + CreateDatabaseMetadata, + CreateDatabaseRequest, + CreateIndexRequest, + DeleteBackupRequest, + DeleteBackupScheduleRequest, + DeleteDatabaseMetadata, + DeleteDatabaseRequest, + DeleteIndexRequest, + ExportDocumentsRequest, + GetBackupRequest, + GetBackupScheduleRequest, + GetDatabaseRequest, + GetFieldRequest, + GetIndexRequest, + ImportDocumentsRequest, + ListBackupSchedulesRequest, + ListBackupSchedulesResponse, + ListBackupsRequest, + ListBackupsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListFieldsRequest, + ListFieldsResponse, + ListIndexesRequest, + ListIndexesResponse, + RestoreDatabaseRequest, + UpdateBackupScheduleRequest, + UpdateDatabaseMetadata, + UpdateDatabaseRequest, + UpdateFieldRequest, +) +from .index import ( + Index, +) +from .location import ( + LocationMetadata, +) +from .operation import ( + ExportDocumentsMetadata, + ExportDocumentsResponse, + FieldOperationMetadata, + ImportDocumentsMetadata, + IndexOperationMetadata, + Progress, + RestoreDatabaseMetadata, + OperationState, +) +from .schedule import ( + BackupSchedule, + DailyRecurrence, + WeeklyRecurrence, +) + +__all__ = ( + 'Backup', + 'Database', + 'Field', + 'CreateBackupScheduleRequest', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'CreateIndexRequest', + 'DeleteBackupRequest', + 'DeleteBackupScheduleRequest', + 'DeleteDatabaseMetadata', + 'DeleteDatabaseRequest', + 'DeleteIndexRequest', + 'ExportDocumentsRequest', + 'GetBackupRequest', + 'GetBackupScheduleRequest', + 'GetDatabaseRequest', + 'GetFieldRequest', + 'GetIndexRequest', + 'ImportDocumentsRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'ListFieldsRequest', + 'ListFieldsResponse', + 
'ListIndexesRequest', + 'ListIndexesResponse', + 'RestoreDatabaseRequest', + 'UpdateBackupScheduleRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'UpdateFieldRequest', + 'Index', + 'LocationMetadata', + 'ExportDocumentsMetadata', + 'ExportDocumentsResponse', + 'FieldOperationMetadata', + 'ImportDocumentsMetadata', + 'IndexOperationMetadata', + 'Progress', + 'RestoreDatabaseMetadata', + 'OperationState', + 'BackupSchedule', + 'DailyRecurrence', + 'WeeklyRecurrence', +) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py new file mode 100644 index 0000000000..baa5c8153f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'Backup', + }, +) + + +class Backup(proto.Message): + r"""A Backup of a Cloud Firestore Database. + + The backup contains all documents and index configurations for + the given database at a specific point in time. + + Attributes: + name (str): + Output only. 
The unique resource name of the Backup. + + Format is + ``projects/{project}/locations/{location}/backups/{backup}``. + database (str): + Output only. Name of the Firestore database that the backup + is from. + + Format is ``projects/{project}/databases/{database}``. + database_uid (str): + Output only. The system-generated UUID4 for + the Firestore database that the backup is from. + snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The backup contains an + externally consistent copy of the database at + this time. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which this + backup expires. + stats (google.cloud.firestore_admin_v1.types.Backup.Stats): + Output only. Statistics about the backup. + + This data only becomes available after the + backup is fully materialized to secondary + storage. This field will be empty till then. + state (google.cloud.firestore_admin_v1.types.Backup.State): + Output only. The current state of the backup. + """ + class State(proto.Enum): + r"""Indicate the current state of the backup. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + CREATING (1): + The pending backup is still being created. + Operations on the backup will be rejected in + this state. + READY (2): + The backup is complete and ready to use. + NOT_AVAILABLE (3): + The backup is not available at this moment. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + NOT_AVAILABLE = 3 + + class Stats(proto.Message): + r"""Backup specific statistics. + + Attributes: + size_bytes (int): + Output only. Summation of the size of all + documents and index entries in the backup, + measured in bytes. + document_count (int): + Output only. The total number of documents + contained in the backup. + index_count (int): + Output only. The total number of index + entries contained in the backup. 
+ """ + + size_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + document_count: int = proto.Field( + proto.INT64, + number=2, + ) + index_count: int = proto.Field( + proto.INT64, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + database_uid: str = proto.Field( + proto.STRING, + number=7, + ) + snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + stats: Stats = proto.Field( + proto.MESSAGE, + number=6, + message=Stats, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py new file mode 100644 index 0000000000..2ed970a159 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'Database', + }, +) + + +class Database(proto.Message): + r"""A Cloud Firestore Database. + + Attributes: + name (str): + The resource name of the Database. Format: + ``projects/{project}/databases/{database}`` + uid (str): + Output only. The system-generated UUID4 for + this Database. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which this database was + created. Databases created before 2016 do not populate + create_time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which this + database was most recently updated. Note this + only includes updates to the database resource + and not data contained by the database. + location_id (str): + The location of the database. Available + locations are listed at + https://cloud.google.com/firestore/docs/locations. + type_ (google.cloud.firestore_admin_v1.types.Database.DatabaseType): + The type of the database. + See + https://cloud.google.com/datastore/docs/firestore-or-datastore + for information about how to choose. + concurrency_mode (google.cloud.firestore_admin_v1.types.Database.ConcurrencyMode): + The concurrency control mode to use for this + database. + version_retention_period (google.protobuf.duration_pb2.Duration): + Output only. The period during which past versions of data + are retained in the database. + + Any [read][google.firestore.v1.GetDocumentRequest.read_time] + or + [query][google.firestore.v1.ListDocumentsRequest.read_time] + can specify a ``read_time`` within this window, and will + read the state of the database at that time. 
+ + If the PITR feature is enabled, the retention period is 7 + days. Otherwise, the retention period is 1 hour. + earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The earliest timestamp at which older versions + of the data can be read from the database. See + [version_retention_period] above; this field is populated + with ``now - version_retention_period``. + + This value is continuously updated, and becomes stale the + moment it is queried. If you are using this value to recover + data, make sure to account for the time from the moment when + the value is queried to the moment when you initiate the + recovery. + point_in_time_recovery_enablement (google.cloud.firestore_admin_v1.types.Database.PointInTimeRecoveryEnablement): + Whether to enable the PITR feature on this + database. + app_engine_integration_mode (google.cloud.firestore_admin_v1.types.Database.AppEngineIntegrationMode): + The App Engine integration mode to use for + this database. + key_prefix (str): + Output only. The key_prefix for this database. This + key_prefix is used, in combination with the project id ("~") + to construct the application id that is returned from the + Cloud Datastore APIs in Google App Engine first generation + runtimes. + + This value may be empty in which case the appid to use for + URL-encoded keys is the project_id (eg: foo instead of + v~foo). + delete_protection_state (google.cloud.firestore_admin_v1.types.Database.DeleteProtectionState): + State of delete protection for the database. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + """ + class DatabaseType(proto.Enum): + r"""The type of the database. + See + https://cloud.google.com/datastore/docs/firestore-or-datastore + for information about how to choose. + + Mode changes are only allowed if the database is empty. 
+ + Values: + DATABASE_TYPE_UNSPECIFIED (0): + The default value. This value is used if the + database type is omitted. + FIRESTORE_NATIVE (1): + Firestore Native Mode + DATASTORE_MODE (2): + Firestore in Datastore Mode. + """ + DATABASE_TYPE_UNSPECIFIED = 0 + FIRESTORE_NATIVE = 1 + DATASTORE_MODE = 2 + + class ConcurrencyMode(proto.Enum): + r"""The type of concurrency control mode for transactions. + + Values: + CONCURRENCY_MODE_UNSPECIFIED (0): + Not used. + OPTIMISTIC (1): + Use optimistic concurrency control by + default. This mode is available for Cloud + Firestore databases. + PESSIMISTIC (2): + Use pessimistic concurrency control by + default. This mode is available for Cloud + Firestore databases. + + This is the default setting for Cloud Firestore. + OPTIMISTIC_WITH_ENTITY_GROUPS (3): + Use optimistic concurrency control with + entity groups by default. + This is the only available mode for Cloud + Datastore. + + This mode is also available for Cloud Firestore + with Datastore Mode but is not recommended. + """ + CONCURRENCY_MODE_UNSPECIFIED = 0 + OPTIMISTIC = 1 + PESSIMISTIC = 2 + OPTIMISTIC_WITH_ENTITY_GROUPS = 3 + + class PointInTimeRecoveryEnablement(proto.Enum): + r"""Point In Time Recovery feature enablement. + + Values: + POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED (0): + Not used. + POINT_IN_TIME_RECOVERY_ENABLED (1): + Reads are supported on selected versions of the data from + within the past 7 days: + + - Reads against any timestamp within the past hour + - Reads against 1-minute snapshots beyond 1 hour and within + 7 days + + ``version_retention_period`` and ``earliest_version_time`` + can be used to determine the supported versions. + POINT_IN_TIME_RECOVERY_DISABLED (2): + Reads are supported on any version of the + data from within the past 1 hour. 
+ """ + POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED = 0 + POINT_IN_TIME_RECOVERY_ENABLED = 1 + POINT_IN_TIME_RECOVERY_DISABLED = 2 + + class AppEngineIntegrationMode(proto.Enum): + r"""The type of App Engine integration mode. + + Values: + APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED (0): + Not used. + ENABLED (1): + If an App Engine application exists in the + same region as this database, App Engine + configuration will impact this database. This + includes disabling of the application & + database, as well as disabling writes to the + database. + DISABLED (2): + App Engine has no effect on the ability of + this database to serve requests. + + This is the default setting for databases + created with the Firestore API. + """ + APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED = 0 + ENABLED = 1 + DISABLED = 2 + + class DeleteProtectionState(proto.Enum): + r"""The delete protection state of the database. + + Values: + DELETE_PROTECTION_STATE_UNSPECIFIED (0): + The default value. Delete protection type is + not specified + DELETE_PROTECTION_DISABLED (1): + Delete protection is disabled + DELETE_PROTECTION_ENABLED (2): + Delete protection is enabled + """ + DELETE_PROTECTION_STATE_UNSPECIFIED = 0 + DELETE_PROTECTION_DISABLED = 1 + DELETE_PROTECTION_ENABLED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + location_id: str = proto.Field( + proto.STRING, + number=9, + ) + type_: DatabaseType = proto.Field( + proto.ENUM, + number=10, + enum=DatabaseType, + ) + concurrency_mode: ConcurrencyMode = proto.Field( + proto.ENUM, + number=15, + enum=ConcurrencyMode, + ) + version_retention_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=17, + 
message=duration_pb2.Duration, + ) + earliest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + point_in_time_recovery_enablement: PointInTimeRecoveryEnablement = proto.Field( + proto.ENUM, + number=21, + enum=PointInTimeRecoveryEnablement, + ) + app_engine_integration_mode: AppEngineIntegrationMode = proto.Field( + proto.ENUM, + number=19, + enum=AppEngineIntegrationMode, + ) + key_prefix: str = proto.Field( + proto.STRING, + number=20, + ) + delete_protection_state: DeleteProtectionState = proto.Field( + proto.ENUM, + number=22, + enum=DeleteProtectionState, + ) + etag: str = proto.Field( + proto.STRING, + number=99, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py new file mode 100644 index 0000000000..b0093a22c3 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_admin_v1.types import index + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'Field', + }, +) + + +class Field(proto.Message): + r"""Represents a single field in the database. + + Fields are grouped by their "Collection Group", which represent + all collections in the database with the same id. + + Attributes: + name (str): + Required. A field name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` + + A field path may be a simple field name, e.g. ``address`` or + a path to fields within map_value , e.g. ``address.city``, + or a special field path. The only valid special field is + ``*``, which represents any field. + + Field paths may be quoted using + ``(backtick). The only character that needs to be escaped within a quoted field path is the backtick character itself, escaped using a backslash. Special characters in field paths that must be quoted include:``\ \*\ ``,``.\ :literal:`, ``` (backtick),`\ [``,``]`, + as well as any ascii symbolic characters. + + Examples: (Note: Comments here are written in markdown + syntax, so there is an additional layer of backticks to + represent a code block) + ``\``\ address.city\`\ ``represents a field named``\ address.city\ ``, not the map key``\ city\ ``in the field``\ address\ ``.``\ \`\ *\`\ ``represents a field named``*\ \`, + not any field. + + A special ``Field`` contains the default indexing settings + for all fields. This field's resource name is: + ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`` + Indexes defined on this ``Field`` will be applied to all + fields which do not have their own ``Field`` index + configuration. 
+ index_config (google.cloud.firestore_admin_v1.types.Field.IndexConfig): + The index configuration for this field. If unset, field + indexing will revert to the configuration defined by the + ``ancestor_field``. To explicitly remove all indexes for + this field, specify an index config with an empty list of + indexes. + ttl_config (google.cloud.firestore_admin_v1.types.Field.TtlConfig): + The TTL configuration for this ``Field``. Setting or + unsetting this will enable or disable the TTL for documents + that have this ``Field``. + """ + + class IndexConfig(proto.Message): + r"""The index configuration for this field. + + Attributes: + indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): + The indexes supported for this field. + uses_ancestor_config (bool): + Output only. When true, the ``Field``'s index configuration + is set from the configuration specified by the + ``ancestor_field``. When false, the ``Field``'s index + configuration is defined explicitly. + ancestor_field (str): + Output only. Specifies the resource name of the ``Field`` + from which this field's index configuration is set (when + ``uses_ancestor_config`` is true), or from which it *would* + be set if this field had no index configuration (when + ``uses_ancestor_config`` is false). + reverting (bool): + Output only When true, the ``Field``'s index configuration + is in the process of being reverted. Once complete, the + index config will transition to the same state as the field + specified by ``ancestor_field``, at which point + ``uses_ancestor_config`` will be ``true`` and ``reverting`` + will be ``false``. 
+ """ + + indexes: MutableSequence[index.Index] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=index.Index, + ) + uses_ancestor_config: bool = proto.Field( + proto.BOOL, + number=2, + ) + ancestor_field: str = proto.Field( + proto.STRING, + number=3, + ) + reverting: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class TtlConfig(proto.Message): + r"""The TTL (time-to-live) configuration for documents that have this + ``Field`` set. Storing a timestamp value into a TTL-enabled field + will be treated as the document's absolute expiration time. Using + any other data type or leaving the field absent will disable the TTL + for the individual document. + + Attributes: + state (google.cloud.firestore_admin_v1.types.Field.TtlConfig.State): + Output only. The state of the TTL + configuration. + """ + class State(proto.Enum): + r"""The state of applying the TTL configuration to all documents. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified or unknown. + CREATING (1): + The TTL is being applied. There is an active + long-running operation to track the change. + Newly written documents will have TTLs applied + as requested. Requested TTLs on existing + documents are still being processed. When TTLs + on all existing documents have been processed, + the state will move to 'ACTIVE'. + ACTIVE (2): + The TTL is active for all documents. + NEEDS_REPAIR (3): + The TTL configuration could not be enabled for all existing + documents. Newly written documents will continue to have + their TTL applied. The LRO returned when last attempting to + enable TTL for this ``Field`` has failed, and may have more + details. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + NEEDS_REPAIR = 3 + + state: 'Field.TtlConfig.State' = proto.Field( + proto.ENUM, + number=1, + enum='Field.TtlConfig.State', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + index_config: IndexConfig = proto.Field( + proto.MESSAGE, + number=2, + message=IndexConfig, + ) + ttl_config: TtlConfig = proto.Field( + proto.MESSAGE, + number=3, + message=TtlConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py new file mode 100644 index 0000000000..db39122110 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py @@ -0,0 +1,815 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_admin_v1.types import backup as gfa_backup +from google.cloud.firestore_admin_v1.types import database as gfa_database +from google.cloud.firestore_admin_v1.types import field as gfa_field +from google.cloud.firestore_admin_v1.types import index as gfa_index +from google.cloud.firestore_admin_v1.types import schedule +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'ListDatabasesRequest', + 'CreateDatabaseRequest', + 'CreateDatabaseMetadata', + 'ListDatabasesResponse', + 'GetDatabaseRequest', + 'UpdateDatabaseRequest', + 'UpdateDatabaseMetadata', + 'DeleteDatabaseRequest', + 'DeleteDatabaseMetadata', + 'CreateBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'UpdateBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'DeleteBackupScheduleRequest', + 'CreateIndexRequest', + 'ListIndexesRequest', + 'ListIndexesResponse', + 'GetIndexRequest', + 'DeleteIndexRequest', + 'UpdateFieldRequest', + 'GetFieldRequest', + 'ListFieldsRequest', + 'ListFieldsResponse', + 'ExportDocumentsRequest', + 'ImportDocumentsRequest', + 'GetBackupRequest', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'DeleteBackupRequest', + 'RestoreDatabaseRequest', + }, +) + + +class ListDatabasesRequest(proto.Message): + r"""A request to list the Firestore Databases in all locations + for a project. + + Attributes: + parent (str): + Required. A parent name of the form + ``projects/{project_id}`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDatabaseRequest(proto.Message): + r"""The request for + [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. 
+ + Attributes: + parent (str): + Required. A parent name of the form + ``projects/{project_id}`` + database (google.cloud.firestore_admin_v1.types.Database): + Required. The Database to create. + database_id (str): + Required. The ID to use for the database, which will become + the final component of the database's resource name. + + This value should be 4-63 characters. Valid characters are + /[a-z][0-9]-/ with first character a letter and the last a + letter or a number. Must not be UUID-like + /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. + + "(default)" database id is also valid. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database: gfa_database.Database = proto.Field( + proto.MESSAGE, + number=2, + message=gfa_database.Database, + ) + database_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateDatabaseMetadata(proto.Message): + r"""Metadata related to the create database operation. + """ + + +class ListDatabasesResponse(proto.Message): + r"""The list of databases for a project. + + Attributes: + databases (MutableSequence[google.cloud.firestore_admin_v1.types.Database]): + The databases in the project. + unreachable (MutableSequence[str]): + In the event that data about individual databases cannot be + listed they will be recorded here. + + An example entry might be: + projects/some_project/locations/some_location This can + happen if the Cloud Region that the Database resides in is + currently unavailable. In this case we can't fetch all the + details about the database. You may be able to get a more + detailed error message (or possibly fetch the resource) by + sending a 'Get' request for the resource or a 'List' request + for the specific location. 
+ """ + + databases: MutableSequence[gfa_database.Database] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gfa_database.Database, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDatabaseRequest(proto.Message): + r"""The request for + [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. + + Attributes: + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDatabaseRequest(proto.Message): + r"""The request for + [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. + + Attributes: + database (google.cloud.firestore_admin_v1.types.Database): + Required. The database to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + """ + + database: gfa_database.Database = proto.Field( + proto.MESSAGE, + number=1, + message=gfa_database.Database, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class UpdateDatabaseMetadata(proto.Message): + r"""Metadata related to the update database operation. + """ + + +class DeleteDatabaseRequest(proto.Message): + r"""The request for + [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. + + Attributes: + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}`` + etag (str): + The current etag of the Database. If an etag is provided and + does not match the current etag of the database, deletion + will be blocked and a FAILED_PRECONDITION error will be + returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteDatabaseMetadata(proto.Message): + r"""Metadata related to the delete database operation. + """ + + +class CreateBackupScheduleRequest(proto.Message): + r"""The request for + [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. + + Attributes: + parent (str): + Required. The parent database. + + Format ``projects/{project}/databases/{database}`` + backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): + Required. The backup schedule to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_schedule: schedule.BackupSchedule = proto.Field( + proto.MESSAGE, + number=2, + message=schedule.BackupSchedule, + ) + + +class GetBackupScheduleRequest(proto.Message): + r"""The request for + [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. + + Attributes: + name (str): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBackupScheduleRequest(proto.Message): + r"""The request for + [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. + + Attributes: + backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): + Required. The backup schedule to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. 
+ """ + + backup_schedule: schedule.BackupSchedule = proto.Field( + proto.MESSAGE, + number=1, + message=schedule.BackupSchedule, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListBackupSchedulesRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + + Attributes: + parent (str): + Required. The parent database. + + Format is ``projects/{project}/databases/{database}``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupSchedulesResponse(proto.Message): + r"""The response for + [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. + + Attributes: + backup_schedules (MutableSequence[google.cloud.firestore_admin_v1.types.BackupSchedule]): + List of all backup schedules. + """ + + backup_schedules: MutableSequence[schedule.BackupSchedule] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=schedule.BackupSchedule, + ) + + +class DeleteBackupScheduleRequest(proto.Message): + r"""The request for [FirestoreAdmin.DeleteBackupSchedules][]. + + Attributes: + name (str): + Required. The name of the backup schedule. + + Format + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateIndexRequest(proto.Message): + r"""The request for + [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. + + Attributes: + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + index (google.cloud.firestore_admin_v1.types.Index): + Required. The composite index to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + index: gfa_index.Index = proto.Field( + proto.MESSAGE, + number=2, + message=gfa_index.Index, + ) + + +class ListIndexesRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + + Attributes: + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + filter (str): + The filter to apply to list results. + page_size (int): + The number of results to return. + page_token (str): + A page token, returned from a previous call to + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], + that may be used to get the next page of results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListIndexesResponse(proto.Message): + r"""The response for + [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. + + Attributes: + indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): + The requested indexes. + next_page_token (str): + A page token that may be used to request + another page of results. If blank, this is the + last page. + """ + + @property + def raw_page(self): + return self + + indexes: MutableSequence[gfa_index.Index] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gfa_index.Index, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetIndexRequest(proto.Message): + r"""The request for + [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. + + Attributes: + name (str): + Required. 
A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteIndexRequest(proto.Message): + r"""The request for + [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. + + Attributes: + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateFieldRequest(proto.Message): + r"""The request for + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. + + Attributes: + field (google.cloud.firestore_admin_v1.types.Field): + Required. The field to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask, relative to the field. If specified, only + configuration specified by this field_mask will be updated + in the field. + """ + + field: gfa_field.Field = proto.Field( + proto.MESSAGE, + number=1, + message=gfa_field.Field, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetFieldRequest(proto.Message): + r"""The request for + [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. + + Attributes: + name (str): + Required. A name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListFieldsRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + + Attributes: + parent (str): + Required. A parent name of the form + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` + filter (str): + The filter to apply to list results. 
Currently, + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + only supports listing fields that have been explicitly + overridden. To issue this query, call + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] + with a filter that includes + ``indexConfig.usesAncestorConfig:false`` . + page_size (int): + The number of results to return. + page_token (str): + A page token, returned from a previous call to + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], + that may be used to get the next page of results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListFieldsResponse(proto.Message): + r"""The response for + [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. + + Attributes: + fields (MutableSequence[google.cloud.firestore_admin_v1.types.Field]): + The requested fields. + next_page_token (str): + A page token that may be used to request + another page of results. If blank, this is the + last page. + """ + + @property + def raw_page(self): + return self + + fields: MutableSequence[gfa_field.Field] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gfa_field.Field, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExportDocumentsRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. + + Attributes: + name (str): + Required. Database to export. Should be of the form: + ``projects/{project_id}/databases/{database_id}``. + collection_ids (MutableSequence[str]): + Which collection ids to export. Unspecified + means all collections. + output_uri_prefix (str): + The output URI. 
Currently only supports Google Cloud Storage + URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, + where ``BUCKET_NAME`` is the name of the Google Cloud + Storage bucket and ``NAMESPACE_PATH`` is an optional Google + Cloud Storage namespace path. When choosing a name, be sure + to consider Google Cloud Storage naming guidelines: + https://cloud.google.com/storage/docs/naming. If the URI is + a bucket (without a namespace path), a prefix will be + generated based on the start time. + namespace_ids (MutableSequence[str]): + An empty list represents all namespaces. This + is the preferred usage for databases that don't + use namespaces. + + An empty string element represents the default + namespace. This should be used if the database + has data in non-default namespaces, but doesn't + want to include them. Each namespace in this + list must be unique. + snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp that corresponds to the version of the + database to be exported. The timestamp must be in the past, + rounded to the minute and not older than + [earliestVersionTime][google.firestore.admin.v1.Database.earliest_version_time]. + If specified, then the exported documents will represent a + consistent view of the database at the provided time. + Otherwise, there are no guarantees about the consistency of + the exported documents. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + collection_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + output_uri_prefix: str = proto.Field( + proto.STRING, + number=3, + ) + namespace_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ImportDocumentsRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. 
+ + Attributes: + name (str): + Required. Database to import into. Should be of the form: + ``projects/{project_id}/databases/{database_id}``. + collection_ids (MutableSequence[str]): + Which collection ids to import. Unspecified + means all collections included in the import. + input_uri_prefix (str): + Location of the exported files. This must match the + output_uri_prefix of an ExportDocumentsResponse from an + export that has completed successfully. See: + [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix]. + namespace_ids (MutableSequence[str]): + An empty list represents all namespaces. This + is the preferred usage for databases that don't + use namespaces. + + An empty string element represents the default + namespace. This should be used if the database + has data in non-default namespaces, but doesn't + want to include them. Each namespace in this + list must be unique. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + collection_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + input_uri_prefix: str = proto.Field( + proto.STRING, + number=3, + ) + namespace_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class GetBackupRequest(proto.Message): + r"""The request for + [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. + + Attributes: + name (str): + Required. Name of the backup to fetch. + + Format is + ``projects/{project}/locations/{location}/backups/{backup}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsRequest(proto.Message): + r"""The request for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. + + Attributes: + parent (str): + Required. The location to list backups from. + + Format is ``projects/{project}/locations/{location}``. 
Use + ``{location} = '-'`` to list backups from all locations for + the given project. This allows listing backups from a single + location or from all locations. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsResponse(proto.Message): + r"""The response for + [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. + + Attributes: + backups (MutableSequence[google.cloud.firestore_admin_v1.types.Backup]): + List of all backups for the project. + unreachable (MutableSequence[str]): + List of locations that existing backups were + not able to be fetched from. + Instead of failing the entire requests when a + single location is unreachable, this response + returns a partial result set and list of + locations unable to be reached here. The request + can be retried against a single location to get + a concrete error. + """ + + backups: MutableSequence[gfa_backup.Backup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gfa_backup.Backup, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteBackupRequest(proto.Message): + r"""The request for + [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. + + Attributes: + name (str): + Required. Name of the backup to delete. + + format is + ``projects/{project}/locations/{location}/backups/{backup}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RestoreDatabaseRequest(proto.Message): + r"""The request message for + [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. + + Attributes: + parent (str): + Required. The project to restore the database in. Format is + ``projects/{project_id}``. + database_id (str): + Required. The ID to use for the database, which will become + the final component of the database's resource name. This + database id must not be associated with an existing + database. 
+ + This value should be 4-63 characters. Valid characters are + /[a-z][0-9]-/ with first character a letter and the last a + letter or a number. Must not be UUID-like + /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. + + "(default)" database id is also valid. + backup (str): + Required. Backup to restore from. Must be from the same + project as the parent. + + Format is: + ``projects/{project_id}/locations/{location}/backups/{backup}`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py new file mode 100644 index 0000000000..727fa12699 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'Index', + }, +) + + +class Index(proto.Message): + r"""Cloud Firestore indexes enable simple and complex queries + against documents in a database. + + Attributes: + name (str): + Output only. A server defined name for this index. The form + of this name for composite indexes will be: + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`` + For single field indexes, this field will be empty. + query_scope (google.cloud.firestore_admin_v1.types.Index.QueryScope): + Indexes with a collection query scope + specified allow queries against a collection + that is the child of a specific document, + specified at query time, and that has the same + collection id. + + Indexes with a collection group query scope + specified allow queries against all collections + descended from a specific document, specified at + query time, and that have the same collection id + as this index. + api_scope (google.cloud.firestore_admin_v1.types.Index.ApiScope): + The API scope supported by this index. + fields (MutableSequence[google.cloud.firestore_admin_v1.types.Index.IndexField]): + The fields supported by this index. + + For composite indexes, this requires a minimum of 2 and a + maximum of 100 fields. The last field entry is always for + the field path ``__name__``. If, on creation, ``__name__`` + was not specified as the last field, it will be added + automatically with the same direction as that of the last + field defined. If the final field in a composite index is + not directional, the ``__name__`` will be ordered ASCENDING + (unless explicitly specified). + + For single field indexes, this will always be exactly one + entry with a field path equal to the field path of the + associated field. 
+ state (google.cloud.firestore_admin_v1.types.Index.State): + Output only. The serving state of the index. + """ + class QueryScope(proto.Enum): + r"""Query Scope defines the scope at which a query is run. This is + specified on a StructuredQuery's ``from`` field. + + Values: + QUERY_SCOPE_UNSPECIFIED (0): + The query scope is unspecified. Not a valid + option. + COLLECTION (1): + Indexes with a collection query scope + specified allow queries against a collection + that is the child of a specific document, + specified at query time, and that has the + collection id specified by the index. + COLLECTION_GROUP (2): + Indexes with a collection group query scope + specified allow queries against all collections + that has the collection id specified by the + index. + COLLECTION_RECURSIVE (3): + Include all the collections's ancestor in the + index. Only available for Datastore Mode + databases. + """ + QUERY_SCOPE_UNSPECIFIED = 0 + COLLECTION = 1 + COLLECTION_GROUP = 2 + COLLECTION_RECURSIVE = 3 + + class ApiScope(proto.Enum): + r"""API Scope defines the APIs (Firestore Native, or Firestore in + Datastore Mode) that are supported for queries. + + Values: + ANY_API (0): + The index can only be used by the Firestore + Native query API. This is the default. + DATASTORE_MODE_API (1): + The index can only be used by the Firestore + in Datastore Mode query API. + """ + ANY_API = 0 + DATASTORE_MODE_API = 1 + + class State(proto.Enum): + r"""The state of an index. During index creation, an index will be in + the ``CREATING`` state. If the index is created successfully, it + will transition to the ``READY`` state. If the index creation + encounters a problem, the index will transition to the + ``NEEDS_REPAIR`` state. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + CREATING (1): + The index is being created. + There is an active long-running operation for + the index. The index is updated when writing a + document. Some index data may exist. 
+ READY (2): + The index is ready to be used. + The index is updated when writing a document. + The index is fully populated from all stored + documents it applies to. + NEEDS_REPAIR (3): + The index was being created, but something + went wrong. There is no active long-running + operation for the index, and the most recently + finished long-running operation failed. The + index is not updated when writing a document. + Some index data may exist. + Use the google.longrunning.Operations API to + determine why the operation that last attempted + to create this index failed, then re-create the + index. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + NEEDS_REPAIR = 3 + + class IndexField(proto.Message): + r"""A field in an index. The field_path describes which field is + indexed, the value_mode describes how the field value is indexed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_path (str): + Can be **name**. For single field indexes, this must match + the name of the field or may be omitted. + order (google.cloud.firestore_admin_v1.types.Index.IndexField.Order): + Indicates that this field supports ordering + by the specified order or comparing using =, !=, + <, <=, >, >=. + + This field is a member of `oneof`_ ``value_mode``. + array_config (google.cloud.firestore_admin_v1.types.Index.IndexField.ArrayConfig): + Indicates that this field supports operations on + ``array_value``\ s. + + This field is a member of `oneof`_ ``value_mode``. + vector_config (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig): + Indicates that this field supports nearest + neighbors and distance operations on vector. 
+ + This field is a member of `oneof`_ ``value_mode``. + """ + class Order(proto.Enum): + r"""The supported orderings. + + Values: + ORDER_UNSPECIFIED (0): + The ordering is unspecified. Not a valid + option. + ASCENDING (1): + The field is ordered by ascending field + value. + DESCENDING (2): + The field is ordered by descending field + value. + """ + ORDER_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + class ArrayConfig(proto.Enum): + r"""The supported array value configurations. + + Values: + ARRAY_CONFIG_UNSPECIFIED (0): + The index does not support additional array + queries. + CONTAINS (1): + The index supports array containment queries. + """ + ARRAY_CONFIG_UNSPECIFIED = 0 + CONTAINS = 1 + + class VectorConfig(proto.Message): + r"""The index configuration to support vector search operations + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (int): + Required. The vector dimension this + configuration applies to. + The resulting index will only include vectors of + this dimension, and can be used for vector + search with the same dimension. + flat (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig.FlatIndex): + Indicates the vector index is a flat index. + + This field is a member of `oneof`_ ``type``. + """ + + class FlatIndex(proto.Message): + r"""An index that stores vectors in a flat data structure, and + supports exhaustive search. 
+ + """ + + dimension: int = proto.Field( + proto.INT32, + number=1, + ) + flat: 'Index.IndexField.VectorConfig.FlatIndex' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='Index.IndexField.VectorConfig.FlatIndex', + ) + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + order: 'Index.IndexField.Order' = proto.Field( + proto.ENUM, + number=2, + oneof='value_mode', + enum='Index.IndexField.Order', + ) + array_config: 'Index.IndexField.ArrayConfig' = proto.Field( + proto.ENUM, + number=3, + oneof='value_mode', + enum='Index.IndexField.ArrayConfig', + ) + vector_config: 'Index.IndexField.VectorConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='value_mode', + message='Index.IndexField.VectorConfig', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query_scope: QueryScope = proto.Field( + proto.ENUM, + number=2, + enum=QueryScope, + ) + api_scope: ApiScope = proto.Field( + proto.ENUM, + number=5, + enum=ApiScope, + ) + fields: MutableSequence[IndexField] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=IndexField, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py new file mode 100644 index 0000000000..0139a3962f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'LocationMetadata', + }, +) + + +class LocationMetadata(proto.Message): + r"""The metadata message for + [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. + + """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py new file mode 100644 index 0000000000..e5be71be20 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.firestore_admin_v1.types import index as gfa_index +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'OperationState', + 'IndexOperationMetadata', + 'FieldOperationMetadata', + 'ExportDocumentsMetadata', + 'ImportDocumentsMetadata', + 'ExportDocumentsResponse', + 'RestoreDatabaseMetadata', + 'Progress', + }, +) + + +class OperationState(proto.Enum): + r"""Describes the state of the operation. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Unspecified. + INITIALIZING (1): + Request is being prepared for processing. + PROCESSING (2): + Request is actively being processed. + CANCELLING (3): + Request is in the process of being cancelled + after user called + google.longrunning.Operations.CancelOperation on + the operation. + FINALIZING (4): + Request has been processed and is in its + finalization stage. + SUCCESSFUL (5): + Request has completed successfully. + FAILED (6): + Request has finished being processed, but + encountered an error. + CANCELLED (7): + Request has finished being cancelled after + user called + google.longrunning.Operations.CancelOperation. + """ + OPERATION_STATE_UNSPECIFIED = 0 + INITIALIZING = 1 + PROCESSING = 2 + CANCELLING = 3 + FINALIZING = 4 + SUCCESSFUL = 5 + FAILED = 6 + CANCELLED = 7 + + +class IndexOperationMetadata(proto.Message): + r"""Metadata for + [google.longrunning.Operation][google.longrunning.Operation] results + from + [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation completed. Will be + unset if operation still in progress. 
+ index (str): + The index resource that this operation is acting on. For + example: + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` + state (google.cloud.firestore_admin_v1.types.OperationState): + The state of the operation. + progress_documents (google.cloud.firestore_admin_v1.types.Progress): + The progress, in documents, of this + operation. + progress_bytes (google.cloud.firestore_admin_v1.types.Progress): + The progress, in bytes, of this operation. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + index: str = proto.Field( + proto.STRING, + number=3, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=4, + enum='OperationState', + ) + progress_documents: 'Progress' = proto.Field( + proto.MESSAGE, + number=5, + message='Progress', + ) + progress_bytes: 'Progress' = proto.Field( + proto.MESSAGE, + number=6, + message='Progress', + ) + + +class FieldOperationMetadata(proto.Message): + r"""Metadata for + [google.longrunning.Operation][google.longrunning.Operation] results + from + [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation completed. Will be + unset if operation still in progress. + field (str): + The field resource that this operation is acting on. 
For + example: + ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` + index_config_deltas (MutableSequence[google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta]): + A list of + [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], + which describe the intent of this operation. + state (google.cloud.firestore_admin_v1.types.OperationState): + The state of the operation. + progress_documents (google.cloud.firestore_admin_v1.types.Progress): + The progress, in documents, of this + operation. + progress_bytes (google.cloud.firestore_admin_v1.types.Progress): + The progress, in bytes, of this operation. + ttl_config_delta (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta): + Describes the deltas of TTL configuration. + """ + + class IndexConfigDelta(proto.Message): + r"""Information about an index configuration change. + + Attributes: + change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta.ChangeType): + Specifies how the index is changing. + index (google.cloud.firestore_admin_v1.types.Index): + The index being changed. + """ + class ChangeType(proto.Enum): + r"""Specifies how the index is changing. + + Values: + CHANGE_TYPE_UNSPECIFIED (0): + The type of change is not specified or known. + ADD (1): + The single field index is being added. + REMOVE (2): + The single field index is being removed. + """ + CHANGE_TYPE_UNSPECIFIED = 0 + ADD = 1 + REMOVE = 2 + + change_type: 'FieldOperationMetadata.IndexConfigDelta.ChangeType' = proto.Field( + proto.ENUM, + number=1, + enum='FieldOperationMetadata.IndexConfigDelta.ChangeType', + ) + index: gfa_index.Index = proto.Field( + proto.MESSAGE, + number=2, + message=gfa_index.Index, + ) + + class TtlConfigDelta(proto.Message): + r"""Information about a TTL configuration change. 
+ + Attributes: + change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta.ChangeType): + Specifies how the TTL configuration is + changing. + """ + class ChangeType(proto.Enum): + r"""Specifies how the TTL config is changing. + + Values: + CHANGE_TYPE_UNSPECIFIED (0): + The type of change is not specified or known. + ADD (1): + The TTL config is being added. + REMOVE (2): + The TTL config is being removed. + """ + CHANGE_TYPE_UNSPECIFIED = 0 + ADD = 1 + REMOVE = 2 + + change_type: 'FieldOperationMetadata.TtlConfigDelta.ChangeType' = proto.Field( + proto.ENUM, + number=1, + enum='FieldOperationMetadata.TtlConfigDelta.ChangeType', + ) + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + field: str = proto.Field( + proto.STRING, + number=3, + ) + index_config_deltas: MutableSequence[IndexConfigDelta] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=IndexConfigDelta, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=5, + enum='OperationState', + ) + progress_documents: 'Progress' = proto.Field( + proto.MESSAGE, + number=6, + message='Progress', + ) + progress_bytes: 'Progress' = proto.Field( + proto.MESSAGE, + number=7, + message='Progress', + ) + ttl_config_delta: TtlConfigDelta = proto.Field( + proto.MESSAGE, + number=8, + message=TtlConfigDelta, + ) + + +class ExportDocumentsMetadata(proto.Message): + r"""Metadata for + [google.longrunning.Operation][google.longrunning.Operation] results + from + [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation completed. 
Will be + unset if operation still in progress. + operation_state (google.cloud.firestore_admin_v1.types.OperationState): + The state of the export operation. + progress_documents (google.cloud.firestore_admin_v1.types.Progress): + The progress, in documents, of this + operation. + progress_bytes (google.cloud.firestore_admin_v1.types.Progress): + The progress, in bytes, of this operation. + collection_ids (MutableSequence[str]): + Which collection ids are being exported. + output_uri_prefix (str): + Where the documents are being exported to. + namespace_ids (MutableSequence[str]): + Which namespace ids are being exported. + snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp that corresponds to the version + of the database that is being exported. If + unspecified, there are no guarantees about the + consistency of the documents being exported. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + progress_documents: 'Progress' = proto.Field( + proto.MESSAGE, + number=4, + message='Progress', + ) + progress_bytes: 'Progress' = proto.Field( + proto.MESSAGE, + number=5, + message='Progress', + ) + collection_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + output_uri_prefix: str = proto.Field( + proto.STRING, + number=7, + ) + namespace_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +class ImportDocumentsMetadata(proto.Message): + r"""Metadata for + [google.longrunning.Operation][google.longrunning.Operation] results + from + 
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time this operation completed. Will be + unset if operation still in progress. + operation_state (google.cloud.firestore_admin_v1.types.OperationState): + The state of the import operation. + progress_documents (google.cloud.firestore_admin_v1.types.Progress): + The progress, in documents, of this + operation. + progress_bytes (google.cloud.firestore_admin_v1.types.Progress): + The progress, in bytes, of this operation. + collection_ids (MutableSequence[str]): + Which collection ids are being imported. + input_uri_prefix (str): + The location of the documents being imported. + namespace_ids (MutableSequence[str]): + Which namespace ids are being imported. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + progress_documents: 'Progress' = proto.Field( + proto.MESSAGE, + number=4, + message='Progress', + ) + progress_bytes: 'Progress' = proto.Field( + proto.MESSAGE, + number=5, + message='Progress', + ) + collection_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + input_uri_prefix: str = proto.Field( + proto.STRING, + number=7, + ) + namespace_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class ExportDocumentsResponse(proto.Message): + r"""Returned in the + [google.longrunning.Operation][google.longrunning.Operation] + response field. + + Attributes: + output_uri_prefix (str): + Location of the output files. 
This can be + used to begin an import into Cloud Firestore + (this project or another project) after the + operation completes successfully. + """ + + output_uri_prefix: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RestoreDatabaseMetadata(proto.Message): + r"""Metadata for the [long-running + operation][google.longrunning.Operation] from the + [RestoreDatabase][google.firestore.admin.v1.RestoreDatabase] + request. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time the restore was started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the restore finished, unset for + ongoing restores. + operation_state (google.cloud.firestore_admin_v1.types.OperationState): + The operation state of the restore. + database (str): + The name of the database being restored to. + backup (str): + The name of the backup restoring from. + progress_percentage (google.cloud.firestore_admin_v1.types.Progress): + How far along the restore is as an estimated + percentage of remaining time. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + database: str = proto.Field( + proto.STRING, + number=4, + ) + backup: str = proto.Field( + proto.STRING, + number=5, + ) + progress_percentage: 'Progress' = proto.Field( + proto.MESSAGE, + number=8, + message='Progress', + ) + + +class Progress(proto.Message): + r"""Describes the progress of the operation. Unit of work is generic and + must be interpreted based on where + [Progress][google.firestore.admin.v1.Progress] is used. + + Attributes: + estimated_work (int): + The amount of work estimated. + completed_work (int): + The amount of work completed. 
+ """ + + estimated_work: int = proto.Field( + proto.INT64, + number=1, + ) + completed_work: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py new file mode 100644 index 0000000000..31bdb020a4 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.admin.v1', + manifest={ + 'BackupSchedule', + 'DailyRecurrence', + 'WeeklyRecurrence', + }, +) + + +class BackupSchedule(proto.Message): + r"""A backup schedule for a Cloud Firestore Database. + + This resource is owned by the database it is backing up, and is + deleted along with the database. The actual backups are not + though. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The unique backup schedule identifier across + all locations and databases for the given project. + + This will be auto-assigned. + + Format is + ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which this + backup schedule was created and effective since. + + No backups will be created for this schedule + before this time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which this backup schedule was + most recently updated. When a backup schedule is first + created, this is the same as create_time. + retention (google.protobuf.duration_pb2.Duration): + At what relative time in the future, compared + to its creation time, the backup should be + deleted, e.g. keep backups for 7 days. + daily_recurrence (google.cloud.firestore_admin_v1.types.DailyRecurrence): + For a schedule that runs daily. + + This field is a member of `oneof`_ ``recurrence``. + weekly_recurrence (google.cloud.firestore_admin_v1.types.WeeklyRecurrence): + For a schedule that runs weekly on a specific + day. + + This field is a member of `oneof`_ ``recurrence``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + retention: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + daily_recurrence: 'DailyRecurrence' = proto.Field( + proto.MESSAGE, + number=7, + oneof='recurrence', + message='DailyRecurrence', + ) + weekly_recurrence: 'WeeklyRecurrence' = proto.Field( + proto.MESSAGE, + number=8, + oneof='recurrence', + message='WeeklyRecurrence', + ) + + +class DailyRecurrence(proto.Message): + r"""Represents a recurring schedule that runs at a specific time + every day. + The time zone is UTC. + + """ + + +class WeeklyRecurrence(proto.Message): + r"""Represents a recurring schedule that runs on a specified day + of the week. + The time zone is UTC. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + The day of week to run. + + DAY_OF_WEEK_UNSPECIFIED is not allowed. 
+ """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/mypy.ini b/owl-bot-staging/firestore_admin/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/firestore_admin/v1/noxfile.py b/owl-bot-staging/firestore_admin/v1/noxfile.py new file mode 100644 index 0000000000..57ae37144f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/noxfile.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12" +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-firestore-admin' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.12" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/firestore_admin_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/firestore_admin_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py new file mode 100644 index 0000000000..686098f602 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_create_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateBackupScheduleRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py new file mode 100644 index 0000000000..66ba2b1c1a --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_create_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateBackupScheduleRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py new file mode 100644 index 0000000000..bae419d964 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_create_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py new file mode 100644 index 0000000000..57c85d136c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_create_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py new file mode 100644 index 0000000000..7a98c87fe1 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateIndex_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_create_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateIndexRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py new file mode 100644 index 0000000000..dc6db35bd5 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_CreateIndex_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_create_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.CreateIndexRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_CreateIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py new file mode 100644 index 0000000000..48d49525e5 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_delete_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteBackup_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py new file mode 100644 index 0000000000..52fdf18acf --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_delete_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py new file mode 100644 index 0000000000..985d1d4449 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_delete_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py new file mode 100644 index 0000000000..5f9d8c4e61 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_delete_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + client.delete_backup(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py new file mode 100644 index 0000000000..9a1915405a --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_delete_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py new file mode 100644 index 0000000000..de3b038b47 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_delete_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py new file mode 100644 index 0000000000..233b861c88 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteIndex_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_delete_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteIndexRequest( + name="name_value", + ) + + # Make the request + await client.delete_index(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py new file mode 100644 index 0000000000..81b1ff6597 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_delete_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.DeleteIndexRequest( + name="name_value", + ) + + # Make the request + client.delete_index(request=request) + + +# [END firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py new file mode 100644 index 0000000000..32b8249283 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ExportDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_export_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ExportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.export_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ExportDocuments_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py new file mode 100644 index 0000000000..4a63526654 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_export_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ExportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.export_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py new file mode 100644 index 0000000000..a5133fa0aa --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_get_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetBackup_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py new file mode 100644 index 0000000000..b0f035cb89 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_get_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py new file mode 100644 index 0000000000..2886304dbb --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_get_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py new file mode 100644 index 0000000000..4aab97dac8 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_get_backup(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetBackup_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py new file mode 100644 index 0000000000..7be034aced --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_get_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py new file mode 100644 index 0000000000..697e7545c6 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_get_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py new file mode 100644 index 0000000000..2613a67cbf --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_get_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetFieldRequest( + name="name_value", + ) + + # Make the request + response = await client.get_field(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetField_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py new file mode 100644 index 0000000000..0127a35473 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_get_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetFieldRequest( + name="name_value", + ) + + # Make the request + response = client.get_field(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetField_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py new file mode 100644 index 0000000000..b04665e3eb --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetIndex_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_get_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetIndexRequest( + name="name_value", + ) + + # Make the request + response = await client.get_index(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py new file mode 100644 index 0000000000..f7deed1839 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIndex +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_GetIndex_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_get_index(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.GetIndexRequest( + name="name_value", + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_GetIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py new file mode 100644 index 0000000000..7eb3b81113 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ImportDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_import_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ImportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ImportDocuments_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py new file mode 100644 index 0000000000..64f43b25b3 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_import_documents(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ImportDocumentsRequest( + name="name_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py new file mode 100644 index 0000000000..7f80d9fd96 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_list_backup_schedules(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_backup_schedules(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py new file mode 100644 index 0000000000..825263f099 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_list_backup_schedules(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_backup_schedules(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py new file mode 100644 index 0000000000..b3f1934253 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_list_backups(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_backups(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListBackups_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py new file mode 100644 index 0000000000..1decda165b --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_list_backups(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_backups(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListBackups_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py new file mode 100644 index 0000000000..f27851537c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_list_databases(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_databases(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListDatabases_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py new file mode 100644 index 0000000000..b8fe32fa20 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_list_databases(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_databases(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py new file mode 100644 index 0000000000..43b341a27e --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFields +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-firestore-admin
+
+
+# [START firestore_v1_generated_FirestoreAdmin_ListFields_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import firestore_admin_v1
+
+
+async def sample_list_fields():
+    # Create a client
+    client = firestore_admin_v1.FirestoreAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = firestore_admin_v1.ListFieldsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_fields(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END firestore_v1_generated_FirestoreAdmin_ListFields_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py new file mode 100644 index 0000000000..485967b786 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFields +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListFields_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_list_fields(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListFieldsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_fields(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListFields_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py new file mode 100644 index 0000000000..991fc2995b --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListIndexes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-firestore-admin
+
+
+# [START firestore_v1_generated_FirestoreAdmin_ListIndexes_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import firestore_admin_v1
+
+
+async def sample_list_indexes():
+    # Create a client
+    client = firestore_admin_v1.FirestoreAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = firestore_admin_v1.ListIndexesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_indexes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END firestore_v1_generated_FirestoreAdmin_ListIndexes_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py new file mode 100644 index 0000000000..94bfa11971 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListIndexes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_ListIndexes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_list_indexes(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.ListIndexesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_ListIndexes_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py new file mode 100644 index 0000000000..6102bf2c68 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-firestore-admin
+
+
+# [START firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import firestore_admin_v1
+
+
+async def sample_restore_database():
+    # Create a client
+    client = firestore_admin_v1.FirestoreAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = firestore_admin_v1.RestoreDatabaseRequest(
+        parent="parent_value",
+        database_id="database_id_value",
+        backup="backup_value",
+    )
+
+    # Make the request
+    operation = client.restore_database(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py new file mode 100644 index 0000000000..5eca0416ed --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_restore_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.RestoreDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + backup="backup_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py new file mode 100644 index 0000000000..7a6a5ab72c --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_update_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py new file mode 100644 index 0000000000..b6ae8ee2f2 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_update_backup_schedule(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py new file mode 100644 index 0000000000..b9e089f16a --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-firestore-admin
+
+
+# [START firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import firestore_admin_v1
+
+
+async def sample_update_database():
+    # Create a client
+    client = firestore_admin_v1.FirestoreAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = firestore_admin_v1.UpdateDatabaseRequest(
+    )
+
+    # Make the request
+    operation = client.update_database(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py new file mode 100644 index 0000000000..8913e0ddbb --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_update_database(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + request = firestore_admin_v1.UpdateDatabaseRequest( + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py new file mode 100644 index 0000000000..98bc008dd5 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_UpdateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +async def sample_update_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminAsyncClient() + + # Initialize request argument(s) + field = firestore_admin_v1.Field() + field.name = "name_value" + + request = firestore_admin_v1.UpdateFieldRequest( + field=field, + ) + + # Make the request + operation = client.update_field(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_UpdateField_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py new file mode 100644 index 0000000000..100601d1c4 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-firestore-admin + + +# [START firestore_v1_generated_FirestoreAdmin_UpdateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import firestore_admin_v1 + + +def sample_update_field(): + # Create a client + client = firestore_admin_v1.FirestoreAdminClient() + + # Initialize request argument(s) + field = firestore_admin_v1.Field() + field.name = "name_value" + + request = firestore_admin_v1.UpdateFieldRequest( + field=field, + ) + + # Make the request + operation = client.update_field(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END firestore_v1_generated_FirestoreAdmin_UpdateField_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json new file mode 100644 index 0000000000..ce3c2bafb3 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json @@ -0,0 +1,3740 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.firestore.admin.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-firestore-admin", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_create_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"backup_schedule", + "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.firestore_admin_v1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "firestore_v1_generated_firestore_admin_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateDatabase_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.firestore_admin_v1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "firestore_v1_generated_firestore_admin_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.CreateIndexRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "index", + "type": "google.cloud.firestore_admin_v1.types.Index" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_index" + }, + "description": "Sample for CreateIndex", + "file": 
"firestore_v1_generated_firestore_admin_create_index_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateIndex_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_index_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "CreateIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.CreateIndexRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "index", + "type": "google.cloud.firestore_admin_v1.types.Index" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_index" + }, + "description": "Sample for CreateIndex", + "file": "firestore_v1_generated_firestore_admin_create_index_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateIndex_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_create_index_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackup", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "firestore_v1_generated_firestore_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_backup", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackup", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteBackupRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "firestore_v1_generated_firestore_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + 
"file": "firestore_v1_generated_firestore_admin_delete_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + "file": "firestore_v1_generated_firestore_admin_delete_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteIndexRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_index" + }, + "description": "Sample for DeleteIndex", + "file": "firestore_v1_generated_firestore_admin_delete_index_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteIndex_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_index_async.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "DeleteIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.DeleteIndexRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_index" + }, + "description": "Sample for DeleteIndex", + "file": "firestore_v1_generated_firestore_admin_delete_index_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_delete_index_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.export_documents", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ExportDocuments", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + 
"shortName": "FirestoreAdmin" + }, + "shortName": "ExportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ExportDocumentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_documents" + }, + "description": "Sample for ExportDocuments", + "file": "firestore_v1_generated_firestore_admin_export_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ExportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_export_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.export_documents", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ExportDocuments", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ExportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ExportDocumentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_documents" + }, + "description": "Sample for ExportDocuments", + "file": "firestore_v1_generated_firestore_admin_export_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_export_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for 
GetBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_get_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for GetBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_backup", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackup", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "firestore_v1_generated_firestore_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, 
+ "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_backup", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackup", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "firestore_v1_generated_firestore_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "firestore_v1_generated_firestore_admin_get_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.firestore_admin_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "firestore_v1_generated_firestore_admin_get_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_field", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetField", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.firestore_admin_v1.types.Field", + "shortName": "get_field" + }, + "description": "Sample for GetField", + "file": "firestore_v1_generated_firestore_admin_get_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetField_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_field", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetField", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Field", + "shortName": "get_field" + }, + "description": "Sample for GetField", + "file": "firestore_v1_generated_firestore_admin_get_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetField_sync", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetIndexRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Index", + "shortName": "get_index" + }, + "description": "Sample for GetIndex", + "file": "firestore_v1_generated_firestore_admin_get_index_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetIndex_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_index_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_index", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetIndex", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "GetIndex" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.GetIndexRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.Index", + "shortName": "get_index" + }, + "description": "Sample for GetIndex", + "file": "firestore_v1_generated_firestore_admin_get_index_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_GetIndex_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_get_index_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.import_documents", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ImportDocuments", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ImportDocumentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "firestore_v1_generated_firestore_admin_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ImportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.import_documents", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ImportDocuments", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": 
"ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ImportDocumentsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "firestore_v1_generated_firestore_admin_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_backup_schedules", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse", + "shortName": "list_backup_schedules" + }, + "description": "Sample for ListBackupSchedules", + "file": "firestore_v1_generated_firestore_admin_list_backup_schedules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_backup_schedules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_backup_schedules", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse", + "shortName": "list_backup_schedules" + 
}, + "description": "Sample for ListBackupSchedules", + "file": "firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_backups", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackups", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListBackupsResponse", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "firestore_v1_generated_firestore_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackups_async", + "segments": 
[ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_backups", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackups", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListBackupsResponse", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "firestore_v1_generated_firestore_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackups_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_databases", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListDatabases", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListDatabasesResponse", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "firestore_v1_generated_firestore_admin_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListDatabases_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", 
+ "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_databases", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListDatabases", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.ListDatabasesResponse", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "firestore_v1_generated_firestore_admin_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListDatabases_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_fields", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListFields", + "service": { + "fullName": 
"google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListFields" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListFieldsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager", + "shortName": "list_fields" + }, + "description": "Sample for ListFields", + "file": "firestore_v1_generated_firestore_admin_list_fields_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListFields_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_fields_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_fields", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListFields", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListFields" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListFieldsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager", + "shortName": "list_fields" + }, + "description": "Sample for ListFields", + "file": "firestore_v1_generated_firestore_admin_list_fields_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListFields_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_fields_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_indexes", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListIndexes", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListIndexes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListIndexesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager", + 
"shortName": "list_indexes" + }, + "description": "Sample for ListIndexes", + "file": "firestore_v1_generated_firestore_admin_list_indexes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListIndexes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_indexes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_indexes", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListIndexes", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "ListIndexes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.ListIndexesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager", + "shortName": "list_indexes" + }, + "description": "Sample for ListIndexes", + "file": "firestore_v1_generated_firestore_admin_list_indexes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_ListIndexes_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_list_indexes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.restore_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "firestore_v1_generated_firestore_admin_restore_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_restore_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.restore_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "firestore_v1_generated_firestore_admin_restore_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_restore_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + 
"fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_backup_schedule", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_update_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_backup_schedule", + 
"method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_database", + "method": { + "fullName": 
"google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.firestore_admin_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "firestore_v1_generated_firestore_admin_update_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_database", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + 
"shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.firestore_admin_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "firestore_v1_generated_firestore_admin_update_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", + "shortName": "FirestoreAdminAsyncClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_field", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateField", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "UpdateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateFieldRequest" + }, + { + "name": 
"field", + "type": "google.cloud.firestore_admin_v1.types.Field" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_field" + }, + "description": "Sample for UpdateField", + "file": "firestore_v1_generated_firestore_admin_update_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateField_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", + "shortName": "FirestoreAdminClient" + }, + "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_field", + "method": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateField", + "service": { + "fullName": "google.firestore.admin.v1.FirestoreAdmin", + "shortName": "FirestoreAdmin" + }, + "shortName": "UpdateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.firestore_admin_v1.types.UpdateFieldRequest" + }, + { + "name": "field", + "type": "google.cloud.firestore_admin_v1.types.Field" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "update_field" + }, + "description": "Sample for UpdateField", + "file": "firestore_v1_generated_firestore_admin_update_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateField_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "firestore_v1_generated_firestore_admin_update_field_sync.py" + } + ] +} diff --git a/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py b/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py new file mode 100644 index 0000000000..6c34107827 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py @@ -0,0 +1,198 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
"""Rewrite callers of the firestore_admin client to keyword-only style.

Walks an input directory of ``.py`` files and rewrites flattened,
positional client-method calls (``client.get_database(name)``) into the
request-object form (``client.get_database(request={'name': name})``),
writing the transformed copies into an empty output directory.
"""
import argparse
import os
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)

import libcst as cst


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(true_list, false_list)``: the items for which *predicate*
    is truthy, then the rest, each preserving input order.
    """
    results: Tuple[List[Any], List[Any]] = ([], [])

    for i in iterator:
        # bool(predicate) -> int index: falsy items go to results[0],
        # truthy items to results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class firestore_adminCallTransformer(cst.CSTTransformer):
    """CST transformer that folds positional args into a ``request`` dict."""

    # Control-plane parameters passed through to the call unchanged.
    # NOTE: Tuple[str, ...] (variable length), not Tuple[str] (a 1-tuple).
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Maps each API method name to the ordered request-field names that
    # its flattened positional arguments correspond to.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_backup_schedule': ('parent', 'backup_schedule', ),
        'create_database': ('parent', 'database', 'database_id', ),
        'create_index': ('parent', 'index', ),
        'delete_backup': ('name', ),
        'delete_backup_schedule': ('name', ),
        'delete_database': ('name', 'etag', ),
        'delete_index': ('name', ),
        'export_documents': ('name', 'collection_ids', 'output_uri_prefix', 'namespace_ids', 'snapshot_time', ),
        'get_backup': ('name', ),
        'get_backup_schedule': ('name', ),
        'get_database': ('name', ),
        'get_field': ('name', ),
        'get_index': ('name', ),
        'import_documents': ('name', 'collection_ids', 'input_uri_prefix', 'namespace_ids', ),
        'list_backups': ('parent', ),
        'list_backup_schedules': ('parent', ),
        'list_databases': ('parent', ),
        'list_fields': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ),
        'restore_database': ('parent', 'database_id', 'backup', ),
        'update_backup_schedule': ('backup_schedule', 'update_mask', ),
        'update_database': ('database', 'update_mask', ),
        'update_field': ('field', 'update_mask', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized API method call; leave everything else alone."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate control params (retry/timeout/metadata) from request fields.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the request fields must be control params,
        # passed positionally; rebind them to their keyword names in order.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=firestore_adminCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory

    NOTE: the default transformer instance is shared across calls; it is
    stateless, so sharing is harmless.
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the firestore_admin client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Distribution configuration for google-cloud-firestore-admin."""
import io
import os
import re

import setuptools # type: ignore

package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-firestore-admin'


description = "Google Cloud Firestore Admin API client library"

version = None

with open(os.path.join(package_root, 'google/cloud/firestore_admin/gapic_version.py')) as fp:
    # Extract the "X.Y.Z" string literal from gapic_version.py.
    # Dots are escaped so only a literal dotted version matches
    # (the unescaped form would also accept e.g. "1a2b3").
    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
    assert (len(version_candidates) == 1)
    version = version_candidates[0]

# PyPI convention: 0.x releases are Beta, >=1.0 is Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    # Exclude incompatible versions of `google-auth`
    # See https://github.com/googleapis/google-cloud-python/issues/12364
    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
    "proto-plus >= 1.22.3, <2.0.0dev",
    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-firestore-admin"

package_root = os.path.abspath(os.path.dirname(__file__))

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the google.* namespace packages; everything else in the
# tree (tests, scripts, docs) stays out of the wheel.
packages = [
    package
    for package in setuptools.find_namespace_packages()
    if package.startswith("google")
]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    install_requires=dependencies,
    include_package_data=True,
    zip_safe=False,
)
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..b8a550c738 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_admin/v1/tests/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py new file mode 100644 index 0000000000..6073726d67 --- /dev/null +++ b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -0,0 +1,15150 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminAsyncClient +from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminClient +from google.cloud.firestore_admin_v1.services.firestore_admin import pagers +from google.cloud.firestore_admin_v1.services.firestore_admin import transports +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import database as gfa_database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import field as gfa_field +from google.cloud.firestore_admin_v1.types import firestore_admin 
+from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import index as gfa_index +from google.cloud.firestore_admin_v1.types import operation as gfa_operation +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FirestoreAdminClient._get_default_mtls_endpoint(None) is None + assert FirestoreAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert FirestoreAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert FirestoreAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert FirestoreAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert FirestoreAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FirestoreAdminClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + FirestoreAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FirestoreAdminClient._read_environment_variables() == (False, "never", None) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert FirestoreAdminClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FirestoreAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FirestoreAdminClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert FirestoreAdminClient._get_client_cert_source(None, False) is None + assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert FirestoreAdminClient._get_client_cert_source(None, True) is mock_default_cert_source + assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) +@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + 
mock_client_cert_source = mock.Mock() + default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert FirestoreAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "always") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT + assert FirestoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert FirestoreAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert FirestoreAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert FirestoreAdminClient._get_universe_domain(None, None) == FirestoreAdminClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + FirestoreAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirestoreAdminClient, "grpc"), + (FirestoreAdminAsyncClient, "grpc_asyncio"), + (FirestoreAdminClient, "rest"), +]) +def test_firestore_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://firestore.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.FirestoreAdminGrpcTransport, "grpc"), + (transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FirestoreAdminRestTransport, "rest"), +]) +def test_firestore_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + 
+@pytest.mark.parametrize("client_class,transport_name", [ + (FirestoreAdminClient, "grpc"), + (FirestoreAdminAsyncClient, "grpc_asyncio"), + (FirestoreAdminClient, "rest"), +]) +def test_firestore_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://firestore.googleapis.com' + ) + + +def test_firestore_admin_client_get_transport_class(): + transport = FirestoreAdminClient.get_transport_class() + available_transports = [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminRestTransport, + ] + assert transport in available_transports + + transport = FirestoreAdminClient.get_transport_class("grpc") + assert transport == transports.FirestoreAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), +]) +@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) +@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) +def 
test_firestore_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FirestoreAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FirestoreAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", "true"), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", 
"false"), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "true"), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "false"), +]) +@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) +@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_firestore_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + FirestoreAdminClient, FirestoreAdminAsyncClient +]) +@mock.patch.object(FirestoreAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAdminClient)) +@mock.patch.object(FirestoreAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAdminAsyncClient)) +def test_firestore_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + FirestoreAdminClient, FirestoreAdminAsyncClient +]) +@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) +@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) +def test_firestore_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE + default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe 
= "bar.com" + mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), +]) +def test_firestore_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", grpc_helpers), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", None), +]) +def test_firestore_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_firestore_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = FirestoreAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", grpc_helpers), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_firestore_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + scopes=None, + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateIndexRequest, + dict, +]) +def test_create_index(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the 
runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + client.create_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateIndexRequest() + + +def test_create_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = firestore_admin.CreateIndexRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + client.create_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateIndexRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_create_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateIndexRequest() + +@pytest.mark.asyncio +async def test_create_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateIndexRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_index_async_from_dict(): + await test_create_index_async(request_type=dict) + + +def test_create_index_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.CreateIndexRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_index_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.CreateIndexRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_index_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_index( + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].index + mock_val = gfa_index.Index(name='name_value') + assert arg == mock_val + + +def test_create_index_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_index( + firestore_admin.CreateIndexRequest(), + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_index_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_index( + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].index + mock_val = gfa_index.Index(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_index_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_index( + firestore_admin.CreateIndexRequest(), + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListIndexesRequest, + dict, +]) +def test_list_indexes(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListIndexesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListIndexesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIndexesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_indexes_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + client.list_indexes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListIndexesRequest() + + +def test_list_indexes_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ListIndexesRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + client.list_indexes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListIndexesRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_indexes_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_indexes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListIndexesRequest() + +@pytest.mark.asyncio +async def test_list_indexes_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListIndexesRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListIndexesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIndexesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_indexes_async_from_dict(): + await test_list_indexes_async(request_type=dict) + + +def test_list_indexes_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.ListIndexesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + call.return_value = firestore_admin.ListIndexesResponse() + client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_indexes_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListIndexesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse()) + await client.list_indexes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_indexes_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListIndexesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_indexes( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_indexes_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_indexes( + firestore_admin.ListIndexesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_indexes_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListIndexesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_indexes( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_indexes_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_indexes( + firestore_admin.ListIndexesRequest(), + parent='parent_value', + ) + + +def test_list_indexes_pager(transport_name: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token='abc', + ), + firestore_admin.ListIndexesResponse( + indexes=[], + next_page_token='def', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token='ghi', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_indexes(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, index.Index) + for i in results) +def test_list_indexes_pages(transport_name: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_indexes), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token='abc', + ), + firestore_admin.ListIndexesResponse( + indexes=[], + next_page_token='def', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token='ghi', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + RuntimeError, + ) + pages = list(client.list_indexes(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_indexes_async_pager(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token='abc', + ), + firestore_admin.ListIndexesResponse( + indexes=[], + next_page_token='def', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token='ghi', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_indexes(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, index.Index) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_indexes_async_pages(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_indexes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token='abc', + ), + firestore_admin.ListIndexesResponse( + indexes=[], + next_page_token='def', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token='ghi', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_indexes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetIndexRequest, + dict, +]) +def test_get_index(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = index.Index( + name='name_value', + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + ) + response = client.get_index(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + assert response.name == 'name_value' + assert response.query_scope == index.Index.QueryScope.COLLECTION + assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API + assert response.state == index.Index.State.CREATING + + +def test_get_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + client.get_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetIndexRequest() + + +def test_get_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.GetIndexRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + client.get_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetIndexRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index( + name='name_value', + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + )) + response = await client.get_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetIndexRequest() + +@pytest.mark.asyncio +async def test_get_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetIndexRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(index.Index( + name='name_value', + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + )) + response = await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + assert response.name == 'name_value' + assert response.query_scope == index.Index.QueryScope.COLLECTION + assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API + assert response.state == index.Index.State.CREATING + + +@pytest.mark.asyncio +async def test_get_index_async_from_dict(): + await test_get_index_async(request_type=dict) + + +def test_get_index_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetIndexRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + call.return_value = index.Index() + client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_index_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetIndexRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) + await client.get_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_index_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = index.Index() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_index( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_index_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_index( + firestore_admin.GetIndexRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_index_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = index.Index() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_index( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_index_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_index( + firestore_admin.GetIndexRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteIndexRequest, + dict, +]) +def test_delete_index(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_index_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + client.delete_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteIndexRequest() + + +def test_delete_index_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.DeleteIndexRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + client.delete_index(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteIndexRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_index_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_index() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteIndexRequest() + +@pytest.mark.asyncio +async def test_delete_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteIndexRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteIndexRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_index_async_from_dict(): + await test_delete_index_async(request_type=dict) + + +def test_delete_index_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteIndexRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + call.return_value = None + client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_index_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteIndexRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_index(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_index_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_index( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_index_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_index( + firestore_admin.DeleteIndexRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_index_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_index), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_index( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_index_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_index( + firestore_admin.DeleteIndexRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetFieldRequest, + dict, +]) +def test_get_field(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = field.Field( + name='name_value', + ) + response = client.get_field(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetFieldRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, field.Field) + assert response.name == 'name_value' + + +def test_get_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + client.get_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetFieldRequest() + + +def test_get_field_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.GetFieldRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + client.get_field(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetFieldRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_field_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field( + name='name_value', + )) + response = await client.get_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetFieldRequest() + +@pytest.mark.asyncio +async def test_get_field_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetFieldRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(field.Field( + name='name_value', + )) + response = await client.get_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetFieldRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, field.Field) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_field_async_from_dict(): + await test_get_field_async(request_type=dict) + + +def test_get_field_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + call.return_value = field.Field() + client.get_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_field_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) + await client.get_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_field_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = field.Field() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_field( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_field_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_field( + firestore_admin.GetFieldRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_field_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = field.Field() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_field( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_field_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_field( + firestore_admin.GetFieldRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateFieldRequest, + dict, +]) +def test_update_field(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateFieldRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + client.update_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateFieldRequest() + + +def test_update_field_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.UpdateFieldRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + client.update_field(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateFieldRequest( + ) + +@pytest.mark.asyncio +async def test_update_field_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateFieldRequest() + +@pytest.mark.asyncio +async def test_update_field_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateFieldRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateFieldRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_field_async_from_dict(): + await test_update_field_async(request_type=dict) + + +def test_update_field_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.UpdateFieldRequest() + + request.field.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'field.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_field_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.UpdateFieldRequest() + + request.field.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'field.name=name_value', + ) in kw['metadata'] + + +def test_update_field_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_field), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_field(
+            field=gfa_field.Field(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].field
+        mock_val = gfa_field.Field(name='name_value')
+        assert arg == mock_val
+
+
+def test_update_field_flattened_error():
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_field(
+            firestore_admin.UpdateFieldRequest(),
+            field=gfa_field.Field(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_update_field_flattened_async():
+    client = FirestoreAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_field),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): removed dead store (sync Operation was immediately overwritten below).
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_field(
+            field=gfa_field.Field(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].field + mock_val = gfa_field.Field(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_field_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_field( + firestore_admin.UpdateFieldRequest(), + field=gfa_field.Field(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListFieldsRequest, + dict, +]) +def test_list_fields(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListFieldsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_fields(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListFieldsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFieldsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_fields_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + client.list_fields() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListFieldsRequest() + + +def test_list_fields_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ListFieldsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + client.list_fields(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListFieldsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_fields_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_fields()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == firestore_admin.ListFieldsRequest()
+
+@pytest.mark.asyncio
+async def test_list_fields_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListFieldsRequest):
+    client = FirestoreAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_fields),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_fields(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = firestore_admin.ListFieldsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListFieldsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_fields_async_from_dict():
+    await test_list_fields_async(request_type=dict)
+
+
+def test_list_fields_field_headers():
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = firestore_admin.ListFieldsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + call.return_value = firestore_admin.ListFieldsResponse() + client.list_fields(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_fields_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListFieldsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse()) + await client.list_fields(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_fields_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = firestore_admin.ListFieldsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_fields(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_fields_flattened_error():
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_fields(
+            firestore_admin.ListFieldsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_fields_flattened_async():
+    client = FirestoreAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_fields),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): removed dead store (sync response was immediately overwritten below).
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_fields(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_fields_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_fields( + firestore_admin.ListFieldsRequest(), + parent='parent_value', + ) + + +def test_list_fields_pager(transport_name: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + field.Field(), + ], + next_page_token='abc', + ), + firestore_admin.ListFieldsResponse( + fields=[], + next_page_token='def', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + ], + next_page_token='ghi', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_fields(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, field.Field) + for i in results) +def test_list_fields_pages(transport_name: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_fields), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + field.Field(), + ], + next_page_token='abc', + ), + firestore_admin.ListFieldsResponse( + fields=[], + next_page_token='def', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + ], + next_page_token='ghi', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + ], + ), + RuntimeError, + ) + pages = list(client.list_fields(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_fields_async_pager(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + field.Field(), + ], + next_page_token='abc', + ), + firestore_admin.ListFieldsResponse( + fields=[], + next_page_token='def', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + ], + next_page_token='ghi', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_fields(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, field.Field) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_fields_async_pages(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_fields), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + field.Field(), + ], + next_page_token='abc', + ), + firestore_admin.ListFieldsResponse( + fields=[], + next_page_token='def', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + ], + next_page_token='ghi', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_fields(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ExportDocumentsRequest, + dict, +]) +def test_export_documents(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.export_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ExportDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_export_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + client.export_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ExportDocumentsRequest() + + +def test_export_documents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ExportDocumentsRequest( + name='name_value', + output_uri_prefix='output_uri_prefix_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + client.export_documents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ExportDocumentsRequest( + name='name_value', + output_uri_prefix='output_uri_prefix_value', + ) + +@pytest.mark.asyncio +async def test_export_documents_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ExportDocumentsRequest() + +@pytest.mark.asyncio +async def test_export_documents_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ExportDocumentsRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ExportDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_documents_async_from_dict(): + await test_export_documents_async(request_type=dict) + + +def test_export_documents_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ExportDocumentsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_export_documents_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ExportDocumentsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.export_documents(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_export_documents_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_documents( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_export_documents_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_documents( + firestore_admin.ExportDocumentsRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_export_documents_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_documents), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        # NOTE(review): removed dead store (sync Operation was immediately overwritten below).
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.export_documents(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_export_documents_flattened_error_async():
+    client = FirestoreAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.export_documents(
+            firestore_admin.ExportDocumentsRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  firestore_admin.ImportDocumentsRequest,
+  dict,
+])
+def test_import_documents(request_type, transport: str = 'grpc'):
+    client = FirestoreAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.import_documents),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.import_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ImportDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + client.import_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ImportDocumentsRequest() + + +def test_import_documents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ImportDocumentsRequest( + name='name_value', + input_uri_prefix='input_uri_prefix_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + client.import_documents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ImportDocumentsRequest( + name='name_value', + input_uri_prefix='input_uri_prefix_value', + ) + +@pytest.mark.asyncio +async def test_import_documents_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ImportDocumentsRequest() + +@pytest.mark.asyncio +async def test_import_documents_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ImportDocumentsRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ImportDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_documents_async_from_dict(): + await test_import_documents_async(request_type=dict) + + +def test_import_documents_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ImportDocumentsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_documents_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.ImportDocumentsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_import_documents_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_documents( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_import_documents_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_documents( + firestore_admin.ImportDocumentsRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_import_documents_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_documents), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_documents( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_import_documents_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_documents( + firestore_admin.ImportDocumentsRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateDatabaseRequest, + dict, +]) +def test_create_database(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateDatabaseRequest() + + +def test_create_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.CreateDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + client.create_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + ) + +@pytest.mark.asyncio +async def test_create_database_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateDatabaseRequest() + +@pytest.mark.asyncio +async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateDatabaseRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_database_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].database + mock_val = gfa_database.Database(name='name_value') + assert arg == mock_val + arg = args[0].database_id + mock_val = 'database_id_value' + assert arg == mock_val + + +def test_create_database_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + firestore_admin.CreateDatabaseRequest(), + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].database + mock_val = gfa_database.Database(name='name_value') + assert arg == mock_val + arg = args[0].database_id + mock_val = 'database_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_database( + firestore_admin.CreateDatabaseRequest(), + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetDatabaseRequest, + dict, +]) +def test_get_database(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = database.Database( + name='name_value', + uid='uid_value', + location_id='location_id_value', + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix='key_prefix_value', + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + etag='etag_value', + ) + response = client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, database.Database) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.location_id == 'location_id_value' + assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE + assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC + assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED + assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED + assert response.key_prefix == 'key_prefix_value' + assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED + assert response.etag == 'etag_value' + + +def test_get_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetDatabaseRequest() + + +def test_get_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.GetDatabaseRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + client.get_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetDatabaseRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_database_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database( + name='name_value', + uid='uid_value', + location_id='location_id_value', + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix='key_prefix_value', + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + etag='etag_value', + )) + response = await client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetDatabaseRequest() + +@pytest.mark.asyncio +async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetDatabaseRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(database.Database( + name='name_value', + uid='uid_value', + location_id='location_id_value', + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix='key_prefix_value', + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + etag='etag_value', + )) + response = await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, database.Database) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.location_id == 'location_id_value' + assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE + assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC + assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED + assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED + assert response.key_prefix == 'key_prefix_value' + assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + + +def test_get_database_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is 
part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = database.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_database_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) + await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_database_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = database.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_database_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database( + firestore_admin.GetDatabaseRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_database_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = database.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_database_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_database( + firestore_admin.GetDatabaseRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListDatabasesResponse( + unreachable=['unreachable_value'], + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore_admin.ListDatabasesResponse) + assert response.unreachable == ['unreachable_value'] + + +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListDatabasesRequest() + + +def test_list_databases_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ListDatabasesRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + client.list_databases(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListDatabasesRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_databases_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse( + unreachable=['unreachable_value'], + )) + response = await client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListDatabasesRequest() + +@pytest.mark.asyncio +async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListDatabasesRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse( + unreachable=['unreachable_value'], + )) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore_admin.ListDatabasesResponse) + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = firestore_admin.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse()) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_databases_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + firestore_admin.ListDatabasesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + firestore_admin.ListDatabasesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateDatabaseRequest, + dict, +]) +def test_update_database(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + client.update_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateDatabaseRequest() + + +def test_update_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.UpdateDatabaseRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + client.update_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateDatabaseRequest( + ) + +@pytest.mark.asyncio +async def test_update_database_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateDatabaseRequest() + +@pytest.mark.asyncio +async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateDatabaseRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_async_from_dict(): + await test_update_database_async(request_type=dict) + + +def test_update_database_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +def test_update_database_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database( + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = gfa_database.Database(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_database_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + firestore_admin.UpdateDatabaseRequest(), + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_database_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_database( + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = gfa_database.Database(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_database_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_database( + firestore_admin.UpdateDatabaseRequest(), + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteDatabaseRequest, + dict, +]) +def test_delete_database(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + client.delete_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteDatabaseRequest() + + +def test_delete_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.DeleteDatabaseRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + client.delete_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteDatabaseRequest( + name='name_value', + etag='etag_value', + ) + +@pytest.mark.asyncio +async def test_delete_database_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteDatabaseRequest() + +@pytest.mark.asyncio +async def test_delete_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteDatabaseRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_database_async_from_dict(): + await test_delete_database_async(request_type=dict) + + +def test_delete_database_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_database_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_database_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_database_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + firestore_admin.DeleteDatabaseRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_database_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_database_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_database( + firestore_admin.DeleteDatabaseRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetBackupRequest, + dict, +]) +def test_get_backup(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup( + name='name_value', + database='database_value', + database_uid='database_uid_value', + state=backup.Backup.State.CREATING, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backup.Backup) + assert response.name == 'name_value' + assert response.database == 'database_value' + assert response.database_uid == 'database_uid_value' + assert response.state == backup.Backup.State.CREATING + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.GetBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetBackupRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc_asyncio',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
+ name='name_value',
+ database='database_value',
+ database_uid='database_uid_value',
+ state=backup.Backup.State.CREATING,
+ ))
+ response = await client.get_backup()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == firestore_admin.GetBackupRequest()
+
+@pytest.mark.asyncio
+async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetBackupRequest):
+ client = FirestoreAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
+ name='name_value',
+ database='database_value',
+ database_uid='database_uid_value',
+ state=backup.Backup.State.CREATING,
+ ))
+ response = await client.get_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = firestore_admin.GetBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, backup.Backup) + assert response.name == 'name_value' + assert response.database == 'database_value' + assert response.database_uid == 'database_uid_value' + assert response.state == backup.Backup.State.CREATING + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_backup_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + firestore_admin.GetBackupRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + firestore_admin.GetBackupRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListBackupsRequest, + dict, +]) +def test_list_backups(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListBackupsResponse( + unreachable=['unreachable_value'], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListBackupsResponse) + assert response.unreachable == ['unreachable_value'] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ListBackupsRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupsRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse( + unreachable=['unreachable_value'], + )) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupsRequest() + +@pytest.mark.asyncio +async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListBackupsRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse( + unreachable=['unreachable_value'], + )) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListBackupsResponse) + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListBackupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = firestore_admin.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListBackupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse()) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_backups_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + firestore_admin.ListBackupsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = firestore_admin.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + firestore_admin.ListBackupsRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteBackupRequest, + dict, +]) +def test_delete_backup(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.DeleteBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupRequest() + +@pytest.mark.asyncio +async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteBackupRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_backup_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + firestore_admin.DeleteBackupRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup( + firestore_admin.DeleteBackupRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + client.restore_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.RestoreDatabaseRequest() + + +def test_restore_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + client.restore_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + +@pytest.mark.asyncio +async def test_restore_database_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restore_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.RestoreDatabaseRequest() + +@pytest.mark.asyncio +async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.RestoreDatabaseRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_database_async_from_dict(): + await test_restore_database_async(request_type=dict) + + +def test_restore_database_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restore_database_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule( + name='name_value', + ) + response = client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_create_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + client.create_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateBackupScheduleRequest() + + +def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = firestore_admin.CreateBackupScheduleRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + client.create_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateBackupScheduleRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_create_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.create_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.CreateBackupScheduleRequest() + +@pytest.mark.asyncio +async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateBackupScheduleRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.CreateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_backup_schedule_async_from_dict(): + await test_create_backup_schedule_async(request_type=dict) + + +def test_create_backup_schedule_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.CreateBackupScheduleRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = schedule.BackupSchedule() + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_backup_schedule_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.CreateBackupScheduleRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_backup_schedule_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_schedule( + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = schedule.BackupSchedule(name='name_value') + assert arg == mock_val + + +def test_create_backup_schedule_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_schedule( + firestore_admin.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_schedule( + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = schedule.BackupSchedule(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_backup_schedule( + firestore_admin.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetBackupScheduleRequest, + dict, +]) +def test_get_backup_schedule(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule( + name='name_value', + ) + response = client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_get_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + client.get_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetBackupScheduleRequest() + + +def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.GetBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + client.get_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetBackupScheduleRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.get_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.GetBackupScheduleRequest() + +@pytest.mark.asyncio +async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetBackupScheduleRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_backup_schedule_async_from_dict(): + await test_get_backup_schedule_async(request_type=dict) + + +def test_get_backup_schedule_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.GetBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = schedule.BackupSchedule() + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_schedule_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.GetBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_backup_schedule_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_backup_schedule_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_schedule( + firestore_admin.GetBackupScheduleRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_schedule( + firestore_admin.GetBackupScheduleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListBackupSchedulesRequest, + dict, +]) +def test_list_backup_schedules(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListBackupSchedulesResponse( + ) + response = client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListBackupSchedulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) + + +def test_list_backup_schedules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + client.list_backup_schedules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupSchedulesRequest() + + +def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.ListBackupSchedulesRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + client.list_backup_schedules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupSchedulesRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backup_schedules_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse( + )) + response = await client.list_backup_schedules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.ListBackupSchedulesRequest() + +@pytest.mark.asyncio +async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListBackupSchedulesRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse( + )) + response = await client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.ListBackupSchedulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_from_dict(): + await test_list_backup_schedules_async(request_type=dict) + + +def test_list_backup_schedules_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.ListBackupSchedulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + call.return_value = firestore_admin.ListBackupSchedulesResponse() + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_backup_schedules_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.ListBackupSchedulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse()) + await client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_backup_schedules_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListBackupSchedulesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_schedules( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_backup_schedules_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_schedules( + firestore_admin.ListBackupSchedulesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backup_schedules_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = firestore_admin.ListBackupSchedulesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_schedules( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_backup_schedules_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_schedules( + firestore_admin.ListBackupSchedulesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule( + name='name_value', + ) + response = client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_update_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + client.update_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateBackupScheduleRequest() + + +def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.UpdateBackupScheduleRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + client.update_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateBackupScheduleRequest( + ) + +@pytest.mark.asyncio +async def test_update_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.update_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.UpdateBackupScheduleRequest() + +@pytest.mark.asyncio +async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateBackupScheduleRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( + name='name_value', + )) + response = await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_from_dict(): + await test_update_backup_schedule_async(request_type=dict) + + +def test_update_backup_schedule_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firestore_admin.UpdateBackupScheduleRequest() + + request.backup_schedule.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = schedule.BackupSchedule() + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup_schedule.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_backup_schedule_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.UpdateBackupScheduleRequest() + + request.backup_schedule.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup_schedule.name=name_value', + ) in kw['metadata'] + + +def test_update_backup_schedule_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_schedule( + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_schedule + mock_val = schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_backup_schedule_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_schedule( + firestore_admin.UpdateBackupScheduleRequest(), + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_backup_schedule_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = schedule.BackupSchedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup_schedule( + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_schedule + mock_val = schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_backup_schedule_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_schedule( + firestore_admin.UpdateBackupScheduleRequest(), + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteBackupScheduleRequest, + dict, +]) +def test_delete_backup_schedule(request_type, transport: str = 'grpc'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + client.delete_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupScheduleRequest() + + +def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore_admin.DeleteBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + client.delete_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupScheduleRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore_admin.DeleteBackupScheduleRequest() + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteBackupScheduleRequest): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore_admin.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_from_dict(): + await test_delete_backup_schedule_async(request_type=dict) + + +def test_delete_backup_schedule_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = None + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firestore_admin.DeleteBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_backup_schedule_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_backup_schedule_flattened_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_schedule( + firestore_admin.DeleteBackupScheduleRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_backup_schedule_flattened_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_backup_schedule_flattened_error_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_schedule( + firestore_admin.DeleteBackupScheduleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateIndexRequest, + dict, +]) +def test_create_index_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request_init["index"] = {'name': 'name_value', 'query_scope': 1, 'api_scope': 1, 'fields': [{'field_path': 'field_path_value', 'order': 1, 'array_config': 1, 'vector_config': {'dimension': 966, 'flat': {}}}], 'state': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateIndexRequest.meta.fields["index"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["index"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["index"][field])): + del request_init["index"][field][i][subfield] + else: + del request_init["index"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_index(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_index_rest_required_fields(request_type=firestore_admin.CreateIndexRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_index(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_index_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_index._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "index", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_index_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_index") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_index") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateIndexRequest.pb(firestore_admin.CreateIndexRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.CreateIndexRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_index(request) + + +def test_create_index_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_index(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" % client.transport._host, args[1]) + + +def test_create_index_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_index( + firestore_admin.CreateIndexRequest(), + parent='parent_value', + index=gfa_index.Index(name='name_value'), + ) + + +def test_create_index_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListIndexesRequest, + dict, +]) +def test_list_indexes_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListIndexesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_indexes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListIndexesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_indexes_rest_required_fields(request_type=firestore_admin.ListIndexesRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_indexes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_indexes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListIndexesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_indexes(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_indexes_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_indexes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_indexes_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_indexes") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_indexes") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = firestore_admin.ListIndexesRequest.pb(firestore_admin.ListIndexesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore_admin.ListIndexesResponse.to_json(firestore_admin.ListIndexesResponse()) + + request = firestore_admin.ListIndexesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListIndexesResponse() + + client.list_indexes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_indexes_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListIndexesRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_indexes(request) + + +def test_list_indexes_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListIndexesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListIndexesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_indexes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" % client.transport._host, args[1]) + + +def test_list_indexes_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_indexes( + firestore_admin.ListIndexesRequest(), + parent='parent_value', + ) + + +def test_list_indexes_rest_pager(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], + next_page_token='abc', + ), + firestore_admin.ListIndexesResponse( + indexes=[], + next_page_token='def', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + ], + next_page_token='ghi', + ), + firestore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(firestore_admin.ListIndexesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + + pager = client.list_indexes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, index.Index) + for i in results) + + pages = list(client.list_indexes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetIndexRequest, + dict, +]) +def test_get_index_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = index.Index( + name='name_value', + query_scope=index.Index.QueryScope.COLLECTION, + api_scope=index.Index.ApiScope.DATASTORE_MODE_API, + state=index.Index.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_index(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, index.Index) + assert response.name == 'name_value' + assert response.query_scope == index.Index.QueryScope.COLLECTION + assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API + assert response.state == index.Index.State.CREATING + + +def test_get_index_rest_required_fields(request_type=firestore_admin.GetIndexRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = index.Index() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_index(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_index_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_index._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_index_rest_interceptors(null_interceptor): + 
transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_index") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_index") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetIndexRequest.pb(firestore_admin.GetIndexRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = index.Index.to_json(index.Index()) + + request = firestore_admin.GetIndexRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = index.Index() + + client.get_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_index(request) + + +def test_get_index_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = index.Index() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = index.Index.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_index(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" % client.transport._host, args[1]) + + +def test_get_index_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_index( + firestore_admin.GetIndexRequest(), + name='name_value', + ) + + +def test_get_index_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteIndexRequest, + dict, +]) +def test_delete_index_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_index(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_index_rest_required_fields(request_type=firestore_admin.DeleteIndexRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_index._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_index(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_index_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_index._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_index_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_index") as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteIndexRequest.pb(firestore_admin.DeleteIndexRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = firestore_admin.DeleteIndexRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteIndexRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_index(request) + + +def test_delete_index_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_index(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" % client.transport._host, args[1]) + + +def test_delete_index_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_index( + firestore_admin.DeleteIndexRequest(), + name='name_value', + ) + + +def test_delete_index_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetFieldRequest, + dict, +]) +def test_get_field_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = field.Field( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = field.Field.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_field(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, field.Field) + assert response.name == 'name_value' + + +def test_get_field_rest_required_fields(request_type=firestore_admin.GetFieldRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_field._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_field._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = field.Field() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = field.Field.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_field(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_field_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_field._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_field_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_field") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_field") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetFieldRequest.pb(firestore_admin.GetFieldRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = field.Field.to_json(field.Field()) + + request = firestore_admin.GetFieldRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = field.Field() + + client.get_field(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_field_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetFieldRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_field(request) + + +def test_get_field_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = field.Field() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = field.Field.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_field(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}" % client.transport._host, args[1]) + + +def test_get_field_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_field( + firestore_admin.GetFieldRequest(), + name='name_value', + ) + + +def test_get_field_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateFieldRequest, + dict, +]) +def test_update_field_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} + request_init["field"] = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4', 'index_config': {'indexes': [{'name': 'name_value', 'query_scope': 1, 'api_scope': 1, 'fields': [{'field_path': 'field_path_value', 'order': 1, 'array_config': 1, 'vector_config': {'dimension': 966, 'flat': {}}}], 'state': 1}], 'uses_ancestor_config': True, 'ancestor_field': 'ancestor_field_value', 'reverting': True}, 'ttl_config': {'state': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateFieldRequest.meta.fields["field"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["field"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["field"][field])): + del request_init["field"][field][i][subfield] + else: + del 
request_init["field"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_field(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_field_rest_required_fields(request_type=firestore_admin.UpdateFieldRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_field._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_field._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_field(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_field_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_field._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("field", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_update_field_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_field") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_field") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateFieldRequest.pb(firestore_admin.UpdateFieldRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.UpdateFieldRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_field(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_field_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateFieldRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_field(request) + + +def test_update_field_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + field=gfa_field.Field(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_field(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}" % client.transport._host, args[1]) + + +def test_update_field_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_field( + firestore_admin.UpdateFieldRequest(), + field=gfa_field.Field(name='name_value'), + ) + + +def test_update_field_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListFieldsRequest, + dict, +]) +def test_list_fields_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListFieldsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListFieldsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_fields(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFieldsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_fields_rest_required_fields(request_type=firestore_admin.ListFieldsRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_fields._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_fields._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListFieldsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListFieldsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_fields(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_fields_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_fields._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_fields_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_fields") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_fields") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListFieldsRequest.pb(firestore_admin.ListFieldsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore_admin.ListFieldsResponse.to_json(firestore_admin.ListFieldsResponse()) + + request = firestore_admin.ListFieldsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListFieldsResponse() + + client.list_fields(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_fields_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListFieldsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_fields(request) + + +def test_list_fields_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListFieldsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListFieldsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_fields(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields" % client.transport._host, args[1]) + + +def test_list_fields_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_fields( + firestore_admin.ListFieldsRequest(), + parent='parent_value', + ) + + +def test_list_fields_rest_pager(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + field.Field(), + ], + next_page_token='abc', + ), + firestore_admin.ListFieldsResponse( + fields=[], + next_page_token='def', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + ], + next_page_token='ghi', + ), + firestore_admin.ListFieldsResponse( + fields=[ + field.Field(), + field.Field(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(firestore_admin.ListFieldsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} + + pager = client.list_fields(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, field.Field) + for i in results) + + pages = list(client.list_fields(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ExportDocumentsRequest, + dict, +]) +def test_export_documents_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_documents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_documents_rest_required_fields(request_type=firestore_admin.ExportDocumentsRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.export_documents(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_export_documents_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.export_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_export_documents") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_export_documents") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ExportDocumentsRequest.pb(firestore_admin.ExportDocumentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.ExportDocumentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_documents_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ExportDocumentsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_documents(request) + + +def test_export_documents_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.export_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*}:exportDocuments" % client.transport._host, args[1]) + + +def test_export_documents_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.export_documents( + firestore_admin.ExportDocumentsRequest(), + name='name_value', + ) + + +def test_export_documents_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ImportDocumentsRequest, + dict, +]) +def test_import_documents_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.import_documents(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_import_documents_rest_required_fields(request_type=firestore_admin.ImportDocumentsRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.import_documents(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_import_documents_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_documents_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_import_documents") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_import_documents") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ImportDocumentsRequest.pb(firestore_admin.ImportDocumentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.ImportDocumentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_documents_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ImportDocumentsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_documents(request) + + +def test_import_documents_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.import_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*}:importDocuments" % client.transport._host, args[1]) + + +def test_import_documents_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_documents( + firestore_admin.ImportDocumentsRequest(), + name='name_value', + ) + + +def test_import_documents_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateDatabaseRequest, + dict, +]) +def test_create_database_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request_init["database"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'location_id': 'location_id_value', 'type_': 1, 'concurrency_mode': 1, 'version_retention_period': {'seconds': 751, 'nanos': 543}, 'earliest_version_time': {}, 'point_in_time_recovery_enablement': 1, 'app_engine_integration_mode': 1, 'key_prefix': 'key_prefix_value', 'delete_protection_state': 1, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del 
request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_database_rest_required_fields(request_type=firestore_admin.CreateDatabaseRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "databaseId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == request_init["database_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["databaseId"] = 'database_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("database_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == 'database_id_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_database(request) + + expected_params = [ + ( + "databaseId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("databaseId", )) & set(("parent", "database", "databaseId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_database") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateDatabaseRequest.pb(firestore_admin.CreateDatabaseRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.CreateDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_database(request) + + +def test_create_database_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1]) + + +def test_create_database_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_database( + firestore_admin.CreateDatabaseRequest(), + parent='parent_value', + database=gfa_database.Database(name='name_value'), + database_id='database_id_value', + ) + + +def test_create_database_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetDatabaseRequest, + dict, +]) +def test_get_database_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = database.Database( + name='name_value', + uid='uid_value', + location_id='location_id_value', + type_=database.Database.DatabaseType.FIRESTORE_NATIVE, + concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, + point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, + app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, + key_prefix='key_prefix_value', + delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = database.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_database(request) + + # 
Establish that the response is the type that we expect. + assert isinstance(response, database.Database) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.location_id == 'location_id_value' + assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE + assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC + assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED + assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED + assert response.key_prefix == 'key_prefix_value' + assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED + assert response.etag == 'etag_value' + + +def test_get_database_rest_required_fields(request_type=firestore_admin.GetDatabaseRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = database.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = database.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_database") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetDatabaseRequest.pb(firestore_admin.GetDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = database.Database.to_json(database.Database()) + + request = firestore_admin.GetDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = database.Database() + + client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_database(request) + + +def test_get_database_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = database.Database() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = database.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1]) + + +def test_get_database_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_database( + firestore_admin.GetDatabaseRequest(), + name='name_value', + ) + + +def test_get_database_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListDatabasesResponse( + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListDatabasesResponse) + assert response.unreachable == ['unreachable_value'] + + +def test_list_databases_rest_required_fields(request_type=firestore_admin.ListDatabasesRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_databases(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_databases_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_databases") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_databases") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListDatabasesRequest.pb(firestore_admin.ListDatabasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore_admin.ListDatabasesResponse.to_json(firestore_admin.ListDatabasesResponse()) + + request = firestore_admin.ListDatabasesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListDatabasesResponse() + + client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_databases_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListDatabasesRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_databases(request) + + +def test_list_databases_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1]) + + +def test_list_databases_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_databases( + firestore_admin.ListDatabasesRequest(), + parent='parent_value', + ) + + +def test_list_databases_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateDatabaseRequest, + dict, +]) +def test_update_database_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'database': {'name': 'projects/sample1/databases/sample2'}} + request_init["database"] = {'name': 'projects/sample1/databases/sample2', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'location_id': 'location_id_value', 'type_': 1, 'concurrency_mode': 1, 'version_retention_period': {'seconds': 751, 'nanos': 543}, 'earliest_version_time': {}, 'point_in_time_recovery_enablement': 1, 'app_engine_integration_mode': 1, 'key_prefix': 'key_prefix_value', 'delete_protection_state': 1, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del 
request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_database_rest_required_fields(request_type=firestore_admin.UpdateDatabaseRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("database", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_update_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_database") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateDatabaseRequest.pb(firestore_admin.UpdateDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.UpdateDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'database': {'name': 'projects/sample1/databases/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_database(request) + + +def test_update_database_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'database': {'name': 'projects/sample1/databases/sample2'}} + + # get truthy value for each flattened field + mock_args = dict( + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database.name=projects/*/databases/*}" % client.transport._host, args[1]) + + +def test_update_database_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + firestore_admin.UpdateDatabaseRequest(), + database=gfa_database.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_database_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteDatabaseRequest, + dict, +]) +def test_delete_database_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_database_rest_required_fields(request_type=firestore_admin.DeleteDatabaseRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_delete_database") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
firestore_admin.DeleteDatabaseRequest.pb(firestore_admin.DeleteDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.DeleteDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_database(request) + + +def test_delete_database_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1]) + + +def test_delete_database_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + firestore_admin.DeleteDatabaseRequest(), + name='name_value', + ) + + +def test_delete_database_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetBackupRequest, + dict, +]) +def test_get_backup_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup( + name='name_value', + database='database_value', + database_uid='database_uid_value', + state=backup.Backup.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.name == 'name_value' + assert response.database == 'database_value' + assert response.database_uid == 'database_uid_value' + assert response.state == backup.Backup.State.CREATING + + +def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left 
alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_backup") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetBackupRequest.pb(firestore_admin.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backup.Backup.to_json(backup.Backup()) + + request = firestore_admin.GetBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.Backup() + + client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetBackupRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1]) + + +def test_get_backup_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + firestore_admin.GetBackupRequest(), + name='name_value', + ) + + +def test_get_backup_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListBackupsRequest, + dict, +]) +def test_list_backups_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListBackupsResponse( + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListBackupsResponse) + assert response.unreachable == ['unreachable_value'] + + +def test_list_backups_rest_required_fields(request_type=firestore_admin.ListBackupsRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_backups") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_backups") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListBackupsRequest.pb(firestore_admin.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore_admin.ListBackupsResponse.to_json(firestore_admin.ListBackupsResponse()) + + request = firestore_admin.ListBackupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListBackupsResponse() + + client.list_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListBackupsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, args[1]) + + +def test_list_backups_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backups( + firestore_admin.ListBackupsRequest(), + parent='parent_value', + ) + + +def test_list_backups_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteBackupRequest, + dict, +]) +def test_delete_backup_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_backup_rest_required_fields(request_type=firestore_admin.DeleteBackupRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_backup") as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteBackupRequest.pb(firestore_admin.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = firestore_admin.DeleteBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + + client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_backup_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteBackupRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1]) + + +def test_delete_backup_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + firestore_admin.DeleteBackupRequest(), + name='name_value', + ) + + +def test_delete_backup_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.restore_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_database_rest_required_fields(request_type=firestore_admin.RestoreDatabaseRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request_init["backup"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["databaseId"] = 'database_id_value' + jsonified_request["backup"] = 'backup_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == 'database_id_value' + assert "backup" in jsonified_request + assert jsonified_request["backup"] == 'backup_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.restore_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_restore_database_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.restore_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "databaseId", "backup", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_database_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + 
mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_restore_database") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_restore_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.RestoreDatabaseRequest.pb(firestore_admin.RestoreDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = firestore_admin.RestoreDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.RestoreDatabaseRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_database(request) + + +def test_restore_database_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2'} + request_init["backup_schedule"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention': {'seconds': 751, 'nanos': 543}, 'daily_recurrence': {}, 'weekly_recurrence': {'day': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.CreateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del 
request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_backup_schedule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_create_backup_schedule_rest_required_fields(request_type=firestore_admin.CreateBackupScheduleRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + 
assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_backup_schedule_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "backupSchedule", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) 
+def test_create_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.CreateBackupScheduleRequest.pb(firestore_admin.CreateBackupScheduleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + + request = firestore_admin.CreateBackupScheduleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.create_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateBackupScheduleRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_schedule(request) + + +def test_create_backup_schedule_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*}/backupSchedules" % client.transport._host, args[1]) + + +def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_schedule( + firestore_admin.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=schedule.BackupSchedule(name='name_value'), + ) + + +def test_create_backup_schedule_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.GetBackupScheduleRequest, + dict, +]) +def test_get_backup_schedule_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_backup_schedule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_get_backup_schedule_rest_required_fields(request_type=firestore_admin.GetBackupScheduleRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_backup_schedule_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.GetBackupScheduleRequest.pb(firestore_admin.GetBackupScheduleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + + request = firestore_admin.GetBackupScheduleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.get_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetBackupScheduleRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_schedule(request) + + +def test_get_backup_schedule_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_schedule( + firestore_admin.GetBackupScheduleRequest(), + name='name_value', + ) + + +def test_get_backup_schedule_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.ListBackupSchedulesRequest, + dict, +]) +def test_list_backup_schedules_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListBackupSchedulesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_backup_schedules(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) + + +def test_list_backup_schedules_rest_required_fields(request_type=firestore_admin.ListBackupSchedulesRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore_admin.ListBackupSchedulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_backup_schedules(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_backup_schedules_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_schedules_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.ListBackupSchedulesRequest.pb(firestore_admin.ListBackupSchedulesRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = firestore_admin.ListBackupSchedulesResponse.to_json(firestore_admin.ListBackupSchedulesResponse()) + + request = firestore_admin.ListBackupSchedulesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore_admin.ListBackupSchedulesResponse() + + client.list_backup_schedules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_schedules_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListBackupSchedulesRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_schedules(request) + + +def test_list_backup_schedules_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = firestore_admin.ListBackupSchedulesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/databases/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_backup_schedules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/databases/*}/backupSchedules" % client.transport._host, args[1]) + + +def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_schedules( + firestore_admin.ListBackupSchedulesRequest(), + parent='parent_value', + ) + + +def test_list_backup_schedules_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'backup_schedule': {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'}} + request_init["backup_schedule"] = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention': {'seconds': 751, 'nanos': 543}, 'daily_recurrence': {}, 'weekly_recurrence': {'day': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firestore_admin.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del 
request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_backup_schedule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_update_backup_schedule_rest_required_fields(request_type=firestore_admin.UpdateBackupScheduleRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_backup_schedule_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & 
set(("backupSchedule", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule") as post, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = firestore_admin.UpdateBackupScheduleRequest.pb(firestore_admin.UpdateBackupScheduleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) + + request = firestore_admin.UpdateBackupScheduleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schedule.BackupSchedule() + + client.update_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateBackupScheduleRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'backup_schedule': {'name': 
'projects/sample1/databases/sample2/backupSchedules/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup_schedule(request) + + +def test_update_backup_schedule_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'backup_schedule': {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_schedule( + firestore_admin.UpdateBackupScheduleRequest(), + backup_schedule=schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_backup_schedule_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + firestore_admin.DeleteBackupScheduleRequest, + dict, +]) +def test_delete_backup_schedule_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_backup_schedule_rest_required_fields(request_type=firestore_admin.DeleteBackupScheduleRequest): + transport_class = transports.FirestoreAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_backup_schedule_rest_unset_required_fields(): + transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_schedule_rest_interceptors(null_interceptor): + transport = transports.FirestoreAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), + ) + client = FirestoreAdminClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_backup_schedule") as pre: + pre.assert_not_called() + pb_message = firestore_admin.DeleteBackupScheduleRequest.pb(firestore_admin.DeleteBackupScheduleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = firestore_admin.DeleteBackupScheduleRequest() + metadata =[ + ("key", 
"val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteBackupScheduleRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_schedule(request) + + +def test_delete_backup_schedule_rest_flattened(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_schedule( + firestore_admin.DeleteBackupScheduleRequest(), + name='name_value', + ) + + +def test_delete_backup_schedule_rest_error(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirestoreAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirestoreAdminClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.FirestoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FirestoreAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + transports.FirestoreAdminRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = FirestoreAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FirestoreAdminGrpcTransport, + ) + +def test_firestore_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FirestoreAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_firestore_admin_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.FirestoreAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_index', + 'list_indexes', + 'get_index', + 'delete_index', + 'get_field', + 'update_field', + 'list_fields', + 'export_documents', + 'import_documents', + 'create_database', + 'get_database', + 'list_databases', + 'update_database', + 'delete_database', + 'get_backup', + 'list_backups', + 'delete_backup', + 'restore_database', + 'create_backup_schedule', + 'get_backup_schedule', + 'list_backup_schedules', + 'update_backup_schedule', + 'delete_backup_schedule', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_firestore_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreAdminTransport( + 
credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + quota_project_id="octopus", + ) + + +def test_firestore_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreAdminTransport() + adc.assert_called_once() + + +def test_firestore_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirestoreAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + ], +) +def test_firestore_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + transports.FirestoreAdminRestTransport, + ], +) +def test_firestore_admin_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreAdminGrpcTransport, grpc_helpers), + (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_firestore_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', +), + scopes=["1", "2"], + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) +def test_firestore_admin_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_firestore_admin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.FirestoreAdminRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_firestore_admin_rest_lro_client(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_firestore_admin_host_no_port(transport_name): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'firestore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://firestore.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_firestore_admin_host_with_port(transport_name): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'firestore.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://firestore.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firestore_admin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FirestoreAdminClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FirestoreAdminClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_index._session + session2 = client2.transport.create_index._session + assert session1 != session2 + session1 = client1.transport.list_indexes._session + session2 = client2.transport.list_indexes._session + assert session1 != session2 + session1 = client1.transport.get_index._session + session2 = client2.transport.get_index._session + assert session1 != session2 + session1 = client1.transport.delete_index._session + session2 = 
client2.transport.delete_index._session + assert session1 != session2 + session1 = client1.transport.get_field._session + session2 = client2.transport.get_field._session + assert session1 != session2 + session1 = client1.transport.update_field._session + session2 = client2.transport.update_field._session + assert session1 != session2 + session1 = client1.transport.list_fields._session + session2 = client2.transport.list_fields._session + assert session1 != session2 + session1 = client1.transport.export_documents._session + session2 = client2.transport.export_documents._session + assert session1 != session2 + session1 = client1.transport.import_documents._session + session2 = client2.transport.import_documents._session + assert session1 != session2 + session1 = client1.transport.create_database._session + session2 = client2.transport.create_database._session + assert session1 != session2 + session1 = client1.transport.get_database._session + session2 = client2.transport.get_database._session + assert session1 != session2 + session1 = client1.transport.list_databases._session + session2 = client2.transport.list_databases._session + assert session1 != session2 + session1 = client1.transport.update_database._session + session2 = client2.transport.update_database._session + assert session1 != session2 + session1 = client1.transport.delete_database._session + session2 = client2.transport.delete_database._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.restore_database._session + session2 = client2.transport.restore_database._session + assert session1 
!= session2 + session1 = client1.transport.create_backup_schedule._session + session2 = client2.transport.create_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.get_backup_schedule._session + session2 = client2.transport.get_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.list_backup_schedules._session + session2 = client2.transport.list_backup_schedules._session + assert session1 != session2 + session1 = client1.transport.update_backup_schedule._session + session2 = client2.transport.update_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.delete_backup_schedule._session + session2 = client2.transport.delete_backup_schedule._session + assert session1 != session2 +def test_firestore_admin_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirestoreAdminGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_firestore_admin_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirestoreAdminGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) +def test_firestore_admin_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) +def test_firestore_admin_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_firestore_admin_grpc_lro_client(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_firestore_admin_grpc_lro_async_client(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_backup_path(): + project = "squid" + location = "clam" + backup = "whelk" + expected = "projects/{project}/locations/{location}/backups/{backup}".format(project=project, location=location, backup=backup, ) + actual = FirestoreAdminClient.backup_path(project, location, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "octopus", + "location": "oyster", + "backup": "nudibranch", + } + path = FirestoreAdminClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_backup_path(path) + assert expected == actual + +def test_backup_schedule_path(): + project = "cuttlefish" + database = "mussel" + backup_schedule = "winkle" + expected = "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format(project=project, database=database, backup_schedule=backup_schedule, ) + actual = FirestoreAdminClient.backup_schedule_path(project, database, backup_schedule) + assert expected == actual + + +def test_parse_backup_schedule_path(): + expected = { + "project": "nautilus", + "database": "scallop", + "backup_schedule": "abalone", + } + path = FirestoreAdminClient.backup_schedule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_backup_schedule_path(path) + assert expected == actual + +def test_collection_group_path(): + project = "squid" + database = "clam" + collection = "whelk" + expected = "projects/{project}/databases/{database}/collectionGroups/{collection}".format(project=project, database=database, collection=collection, ) + actual = FirestoreAdminClient.collection_group_path(project, database, collection) + assert expected == actual + + +def test_parse_collection_group_path(): + expected = { + "project": "octopus", + "database": "oyster", + "collection": "nudibranch", + } + path = FirestoreAdminClient.collection_group_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_collection_group_path(path) + assert expected == actual + +def test_database_path(): + project = "cuttlefish" + database = "mussel" + expected = "projects/{project}/databases/{database}".format(project=project, database=database, ) + actual = FirestoreAdminClient.database_path(project, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "winkle", + "database": "nautilus", + } + path = FirestoreAdminClient.database_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_database_path(path) + assert expected == actual + +def test_field_path(): + project = "scallop" + database = "abalone" + collection = "squid" + field = "clam" + expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(project=project, database=database, collection=collection, field=field, ) + actual = FirestoreAdminClient.field_path(project, database, collection, field) + assert expected == actual + + +def test_parse_field_path(): + expected = { + "project": "whelk", + "database": "octopus", + "collection": "oyster", + "field": "nudibranch", + } + path = FirestoreAdminClient.field_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_field_path(path) + assert expected == actual + +def test_index_path(): + project = "cuttlefish" + database = "mussel" + collection = "winkle" + index = "nautilus" + expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(project=project, database=database, collection=collection, index=index, ) + actual = FirestoreAdminClient.index_path(project, database, collection, index) + assert expected == actual + + +def test_parse_index_path(): + expected = { + "project": "scallop", + "database": "abalone", + "collection": "squid", + "index": "clam", + } + path = FirestoreAdminClient.index_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_index_path(path) + assert expected == actual + +def test_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = FirestoreAdminClient.location_path(project, location) + assert expected == actual + + +def test_parse_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = FirestoreAdminClient.location_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_location_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = FirestoreAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = FirestoreAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = FirestoreAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = FirestoreAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = FirestoreAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = FirestoreAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = FirestoreAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = FirestoreAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = FirestoreAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = FirestoreAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = FirestoreAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirestoreAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.FirestoreAdminTransport, '_prep_wrapped_messages') as prep: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.FirestoreAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = FirestoreAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/databases/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = FirestoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport), + (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + 
client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc new file mode 100644 index 0000000000..8c3b084db3 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bundle/__init__.py + google/cloud/bundle/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 new file mode 100644 index 0000000000..29227d4cf4 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in b/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in new file mode 100644 index 0000000000..67688af043 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/bundle *.py +recursive-include google/cloud/bundle *.py diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst new file mode 100644 index 0000000000..e0cf79f104 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Bundle API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Bundle API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css new file mode 100644 index 0000000000..06423be0b5 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst new file mode 100644 index 0000000000..535624ca8c --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst @@ -0,0 +1,4 @@ +Services for Google Cloud Bundle API +===================================== +.. toctree:: + :maxdepth: 2 diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst new file mode 100644 index 0000000000..2fdc85ab78 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bundle API +================================== + +..
automodule:: google.cloud.bundle.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py new file mode 100644 index 0000000000..7e747b50b1 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-bundle documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-bundle" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bundle-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bundle.tex", + u"google-cloud-bundle Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-bundle", + u"Google Cloud Bundle Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bundle", + u"google-cloud-bundle Documentation", + author, + "google-cloud-bundle", + "GAPIC library for Google Cloud Bundle API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst new file mode 100644 index 0000000000..ddec428d33 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + bundle/services + bundle/types diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py new file mode 100644 index 0000000000..3e35a98926 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bundle import gapic_version as package_version + +__version__ = package_version.__version__ + + + +from .types.bundle import BundledDocumentMetadata +from .types.bundle import BundledQuery +from .types.bundle import BundleElement +from .types.bundle import BundleMetadata +from .types.bundle import NamedQuery + +__all__ = ( +'BundleElement', +'BundleMetadata', +'BundledDocumentMetadata', +'BundledQuery', +'NamedQuery', +) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json new file mode 100644 index 0000000000..e81fe51253 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json @@ -0,0 +1,7 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bundle", + "protoPackage": "google.firestore.bundle", + "schema": "1.0" +} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py new file mode 100644 index 0000000000..558c8aab67 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed new file mode 100644 index 0000000000..e2987f2963 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bundle package uses inline types. diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py new file mode 100644 index 0000000000..8f6cf06824 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py new file mode 100644 index 0000000000..bd79268795 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .bundle import ( + BundledDocumentMetadata, + BundledQuery, + BundleElement, + BundleMetadata, + NamedQuery, +) + +__all__ = ( + 'BundledDocumentMetadata', + 'BundledQuery', + 'BundleElement', + 'BundleMetadata', + 'NamedQuery', +) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py new file mode 100644 index 0000000000..074b54601e --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.firestore.v1 import document_pb2 # type: ignore +from google.firestore.v1 import query_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.firestore.bundle', + manifest={ + 'BundledQuery', + 'NamedQuery', + 'BundledDocumentMetadata', + 'BundleMetadata', + 'BundleElement', + }, +) + + +class BundledQuery(proto.Message): + r"""Encodes a query saved in the bundle. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + The parent resource name. + structured_query (google.firestore.v1.query_pb2.StructuredQuery): + A structured query. + + This field is a member of `oneof`_ ``query_type``. + limit_type (google.cloud.bundle.types.BundledQuery.LimitType): + + """ + class LimitType(proto.Enum): + r"""If the query is a limit query, should the limit be applied to + the beginning or the end of results. + + Values: + FIRST (0): + No description available. + LAST (1): + No description available. 
+ """ + FIRST = 0 + LAST = 1 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + structured_query: query_pb2.StructuredQuery = proto.Field( + proto.MESSAGE, + number=2, + oneof='query_type', + message=query_pb2.StructuredQuery, + ) + limit_type: LimitType = proto.Field( + proto.ENUM, + number=3, + enum=LimitType, + ) + + +class NamedQuery(proto.Message): + r"""A Query associated with a name, created as part of the bundle + file, and can be read by client SDKs once the bundle containing + them is loaded. + + Attributes: + name (str): + Name of the query, such that client can use + the name to load this query from bundle, and + resume from when the query results are + materialized into this bundle. + bundled_query (google.cloud.bundle.types.BundledQuery): + The query saved in the bundle. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The read time of the query, when it is used + to build the bundle. This is useful to resume + the query from the bundle, once it is loaded by + client SDKs. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + bundled_query: 'BundledQuery' = proto.Field( + proto.MESSAGE, + number=2, + message='BundledQuery', + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class BundledDocumentMetadata(proto.Message): + r"""Metadata describing a Firestore document saved in the bundle. + + Attributes: + name (str): + The document key of a bundled document. + read_time (google.protobuf.timestamp_pb2.Timestamp): + The snapshot version of the document data + bundled. + exists (bool): + Whether the document exists. + queries (MutableSequence[str]): + The names of the queries in this bundle that + this document matches to. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + exists: bool = proto.Field( + proto.BOOL, + number=3, + ) + queries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class BundleMetadata(proto.Message): + r"""Metadata describing the bundle file/stream. + + Attributes: + id (str): + The ID of the bundle. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time at which the documents snapshot is taken + for this bundle. + version (int): + The schema version of the bundle. + total_documents (int): + The number of documents in the bundle. + total_bytes (int): + The size of the bundle in bytes, excluding this + ``BundleMetadata``. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + version: int = proto.Field( + proto.UINT32, + number=3, + ) + total_documents: int = proto.Field( + proto.UINT32, + number=4, + ) + total_bytes: int = proto.Field( + proto.UINT64, + number=5, + ) + + +class BundleElement(proto.Message): + r"""A Firestore bundle is a length-prefixed stream of JSON + representations of ``BundleElement``. Only one ``BundleMetadata`` is + expected, and it should be the first element. The named queries + follow after ``metadata``. Every ``document_metadata`` is + immediately followed by a ``document``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metadata (google.cloud.bundle.types.BundleMetadata): + + This field is a member of `oneof`_ ``element_type``. 
+ named_query (google.cloud.bundle.types.NamedQuery): + + This field is a member of `oneof`_ ``element_type``. + document_metadata (google.cloud.bundle.types.BundledDocumentMetadata): + + This field is a member of `oneof`_ ``element_type``. + document (google.firestore.v1.document_pb2.Document): + + This field is a member of `oneof`_ ``element_type``. + """ + + metadata: 'BundleMetadata' = proto.Field( + proto.MESSAGE, + number=1, + oneof='element_type', + message='BundleMetadata', + ) + named_query: 'NamedQuery' = proto.Field( + proto.MESSAGE, + number=2, + oneof='element_type', + message='NamedQuery', + ) + document_metadata: 'BundledDocumentMetadata' = proto.Field( + proto.MESSAGE, + number=3, + oneof='element_type', + message='BundledDocumentMetadata', + ) + document: document_pb2.Document = proto.Field( + proto.MESSAGE, + number=4, + oneof='element_type', + message=document_pb2.Document, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini b/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py new file mode 100644 index 0000000000..0d6a9976d2 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12" +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-bundle' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.12" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/bundle/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency 
versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/bundle/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py new file mode 100644 index 0000000000..ff298689fd --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py @@ -0,0 +1,175 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class bundleCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bundleCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bundle client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py new file mode 100644 index 0000000000..85af540536 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-bundle' + + +description = "Google Cloud Bundle API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/bundle/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bundle" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + 
"Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt new file mode 100644 index 0000000000..b8a550c738 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ed7f9aed25 --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py new file mode 100644 index 0000000000..7b3de3117f --- /dev/null +++ b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# From 07727b68736575e325eb255131cb37f48dba2ba9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 2 Apr 2024 22:00:58 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/firestore_admin/async_client.py | 5 +- .../services/firestore_admin/client.py | 5 +- .../firestore_admin/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- owl-bot-staging/firestore/v1/.coveragerc | 13 - owl-bot-staging/firestore/v1/.flake8 | 33 - owl-bot-staging/firestore/v1/MANIFEST.in | 2 - owl-bot-staging/firestore/v1/README.rst | 49 - .../firestore/v1/docs/_static/custom.css | 3 - owl-bot-staging/firestore/v1/docs/conf.py | 376 - .../v1/docs/firestore_v1/firestore.rst | 10 - .../v1/docs/firestore_v1/services_.rst | 6 - .../firestore/v1/docs/firestore_v1/types_.rst | 6 - owl-bot-staging/firestore/v1/docs/index.rst | 7 - .../firestore/v1/firestore-v1-py.tar.gz | 0 .../v1/google/cloud/firestore/__init__.py | 133 - .../google/cloud/firestore/gapic_version.py | 16 - .../v1/google/cloud/firestore/py.typed | 2 - .../v1/google/cloud/firestore_v1/__init__.py | 134 - .../cloud/firestore_v1/gapic_metadata.json | 268 - .../cloud/firestore_v1/gapic_version.py | 16 - .../v1/google/cloud/firestore_v1/py.typed | 2 - .../cloud/firestore_v1/services/__init__.py | 15 - .../services/firestore/__init__.py | 22 - .../services/firestore/async_client.py | 2200 --- .../firestore_v1/services/firestore/client.py | 2416 --- .../firestore_v1/services/firestore/pagers.py | 383 - .../services/firestore/transports/__init__.py | 38 - .../services/firestore/transports/base.py | 532 - .../services/firestore/transports/grpc.py | 774 - .../firestore/transports/grpc_asyncio.py | 773 - .../services/firestore/transports/rest.py | 2188 --- 
.../cloud/firestore_v1/types/__init__.py | 140 - .../firestore_v1/types/aggregation_result.py | 60 - .../cloud/firestore_v1/types/bloom_filter.py | 110 - .../google/cloud/firestore_v1/types/common.py | 172 - .../cloud/firestore_v1/types/document.py | 288 - .../cloud/firestore_v1/types/firestore.py | 1758 -- .../google/cloud/firestore_v1/types/query.py | 875 - .../cloud/firestore_v1/types/query_profile.py | 144 - .../google/cloud/firestore_v1/types/write.py | 509 - owl-bot-staging/firestore/v1/mypy.ini | 3 - owl-bot-staging/firestore/v1/noxfile.py | 253 - ...ted_firestore_batch_get_documents_async.py | 54 - ...ated_firestore_batch_get_documents_sync.py | 54 - ...1_generated_firestore_batch_write_async.py | 52 - ...v1_generated_firestore_batch_write_sync.py | 52 - ...rated_firestore_begin_transaction_async.py | 52 - ...erated_firestore_begin_transaction_sync.py | 52 - ...ore_v1_generated_firestore_commit_async.py | 52 - ...tore_v1_generated_firestore_commit_sync.py | 52 - ...nerated_firestore_create_document_async.py | 53 - ...enerated_firestore_create_document_sync.py | 53 - ...nerated_firestore_delete_document_async.py | 50 - ...enerated_firestore_delete_document_sync.py | 50 - ..._generated_firestore_get_document_async.py | 53 - ...1_generated_firestore_get_document_sync.py | 53 - ...ted_firestore_list_collection_ids_async.py | 53 - ...ated_firestore_list_collection_ids_sync.py | 53 - ...enerated_firestore_list_documents_async.py | 54 - ...generated_firestore_list_documents_sync.py | 54 - ...ore_v1_generated_firestore_listen_async.py | 67 - ...tore_v1_generated_firestore_listen_sync.py | 67 - ...nerated_firestore_partition_query_async.py | 53 - ...enerated_firestore_partition_query_sync.py | 53 - ...e_v1_generated_firestore_rollback_async.py | 51 - ...re_v1_generated_firestore_rollback_sync.py | 51 - ...d_firestore_run_aggregation_query_async.py | 54 - ...ed_firestore_run_aggregation_query_sync.py | 54 - ..._v1_generated_firestore_run_query_async.py | 54 - 
...e_v1_generated_firestore_run_query_sync.py | 54 - ...nerated_firestore_update_document_async.py | 51 - ...enerated_firestore_update_document_sync.py | 51 - ...tore_v1_generated_firestore_write_async.py | 63 - ...store_v1_generated_firestore_write_sync.py | 63 - .../snippet_metadata_google.firestore.v1.json | 2523 --- .../v1/scripts/fixup_firestore_v1_keywords.py | 191 - owl-bot-staging/firestore/v1/setup.py | 93 - .../firestore/v1/testing/constraints-3.10.txt | 6 - .../firestore/v1/testing/constraints-3.11.txt | 6 - .../firestore/v1/testing/constraints-3.12.txt | 6 - .../firestore/v1/testing/constraints-3.7.txt | 10 - .../firestore/v1/testing/constraints-3.8.txt | 6 - .../firestore/v1/testing/constraints-3.9.txt | 6 - .../firestore/v1/tests/__init__.py | 16 - .../firestore/v1/tests/unit/__init__.py | 16 - .../firestore/v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/firestore_v1/__init__.py | 16 - .../unit/gapic/firestore_v1/test_firestore.py | 9455 ---------- .../firestore_admin/v1/.coveragerc | 13 - owl-bot-staging/firestore_admin/v1/.flake8 | 33 - .../firestore_admin/v1/MANIFEST.in | 2 - owl-bot-staging/firestore_admin/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../firestore_admin/v1/docs/conf.py | 376 - .../firestore_admin_v1/firestore_admin.rst | 10 - .../v1/docs/firestore_admin_v1/services_.rst | 6 - .../v1/docs/firestore_admin_v1/types_.rst | 6 - .../firestore_admin/v1/docs/index.rst | 7 - .../google/cloud/firestore_admin/__init__.py | 121 - .../cloud/firestore_admin/gapic_version.py | 16 - .../v1/google/cloud/firestore_admin/py.typed | 2 - .../cloud/firestore_admin_v1/__init__.py | 122 - .../firestore_admin_v1/gapic_metadata.json | 373 - .../cloud/firestore_admin_v1/gapic_version.py | 16 - .../google/cloud/firestore_admin_v1/py.typed | 2 - .../firestore_admin_v1/services/__init__.py | 15 - .../services/firestore_admin/__init__.py | 22 - .../services/firestore_admin/async_client.py | 3202 ---- 
.../services/firestore_admin/client.py | 3570 ---- .../services/firestore_admin/pagers.py | 262 - .../firestore_admin/transports/__init__.py | 38 - .../firestore_admin/transports/base.py | 551 - .../firestore_admin/transports/grpc.py | 1032 -- .../transports/grpc_asyncio.py | 1031 -- .../firestore_admin/transports/rest.py | 3178 ---- .../firestore_admin_v1/types/__init__.py | 128 - .../cloud/firestore_admin_v1/types/backup.py | 152 - .../firestore_admin_v1/types/database.py | 294 - .../cloud/firestore_admin_v1/types/field.py | 183 - .../types/firestore_admin.py | 815 - .../cloud/firestore_admin_v1/types/index.py | 301 - .../firestore_admin_v1/types/location.py | 38 - .../firestore_admin_v1/types/operation.py | 507 - .../firestore_admin_v1/types/schedule.py | 145 - owl-bot-staging/firestore_admin/v1/mypy.ini | 3 - owl-bot-staging/firestore_admin/v1/noxfile.py | 253 - ...tore_admin_create_backup_schedule_async.py | 52 - ...store_admin_create_backup_schedule_sync.py | 52 - ...d_firestore_admin_create_database_async.py | 57 - ...ed_firestore_admin_create_database_sync.py | 57 - ...ated_firestore_admin_create_index_async.py | 56 - ...rated_firestore_admin_create_index_sync.py | 56 - ...ted_firestore_admin_delete_backup_async.py | 50 - ...tore_admin_delete_backup_schedule_async.py | 50 - ...store_admin_delete_backup_schedule_sync.py | 50 - ...ated_firestore_admin_delete_backup_sync.py | 50 - ...d_firestore_admin_delete_database_async.py | 56 - ...ed_firestore_admin_delete_database_sync.py | 56 - ...ated_firestore_admin_delete_index_async.py | 50 - ...rated_firestore_admin_delete_index_sync.py | 50 - ..._firestore_admin_export_documents_async.py | 56 - ...d_firestore_admin_export_documents_sync.py | 56 - ...erated_firestore_admin_get_backup_async.py | 52 - ...restore_admin_get_backup_schedule_async.py | 52 - ...irestore_admin_get_backup_schedule_sync.py | 52 - ...nerated_firestore_admin_get_backup_sync.py | 52 - ...ated_firestore_admin_get_database_async.py | 52 - 
...rated_firestore_admin_get_database_sync.py | 52 - ...nerated_firestore_admin_get_field_async.py | 52 - ...enerated_firestore_admin_get_field_sync.py | 52 - ...nerated_firestore_admin_get_index_async.py | 52 - ...enerated_firestore_admin_get_index_sync.py | 52 - ..._firestore_admin_import_documents_async.py | 56 - ...d_firestore_admin_import_documents_sync.py | 56 - ...store_admin_list_backup_schedules_async.py | 52 - ...estore_admin_list_backup_schedules_sync.py | 52 - ...ated_firestore_admin_list_backups_async.py | 52 - ...rated_firestore_admin_list_backups_sync.py | 52 - ...ed_firestore_admin_list_databases_async.py | 52 - ...ted_firestore_admin_list_databases_sync.py | 52 - ...rated_firestore_admin_list_fields_async.py | 53 - ...erated_firestore_admin_list_fields_sync.py | 53 - ...ated_firestore_admin_list_indexes_async.py | 53 - ...rated_firestore_admin_list_indexes_sync.py | 53 - ..._firestore_admin_restore_database_async.py | 58 - ...d_firestore_admin_restore_database_sync.py | 58 - ...tore_admin_update_backup_schedule_async.py | 51 - ...store_admin_update_backup_schedule_sync.py | 51 - ...d_firestore_admin_update_database_async.py | 55 - ...ed_firestore_admin_update_database_sync.py | 55 - ...ated_firestore_admin_update_field_async.py | 59 - ...rated_firestore_admin_update_field_sync.py | 59 - ...et_metadata_google.firestore.admin.v1.json | 3740 ---- .../fixup_firestore_admin_v1_keywords.py | 198 - owl-bot-staging/firestore_admin/v1/setup.py | 93 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 10 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - .../firestore_admin/v1/tests/__init__.py | 16 - .../firestore_admin/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/firestore_admin_v1/__init__.py | 16 - .../test_firestore_admin.py | 15150 ---------------- 
.../firestore-bundle-py/.coveragerc | 13 - .../firestore-bundle-py/.flake8 | 33 - .../firestore-bundle-py/MANIFEST.in | 2 - .../firestore-bundle-py/README.rst | 49 - .../docs/_static/custom.css | 3 - .../docs/bundle/services_.rst | 4 - .../docs/bundle/types_.rst | 6 - .../firestore-bundle-py/docs/conf.py | 376 - .../firestore-bundle-py/docs/index.rst | 7 - .../google/cloud/bundle/__init__.py | 34 - .../google/cloud/bundle/gapic_metadata.json | 7 - .../google/cloud/bundle/gapic_version.py | 16 - .../google/cloud/bundle/py.typed | 2 - .../google/cloud/bundle/services/__init__.py | 15 - .../google/cloud/bundle/types/__init__.py | 30 - .../google/cloud/bundle/types/bundle.py | 251 - .../firestore-bundle-py/mypy.ini | 3 - .../firestore-bundle-py/noxfile.py | 253 - .../scripts/fixup_bundle_keywords.py | 175 - .../firestore-bundle-py/setup.py | 93 - .../testing/constraints-3.10.txt | 6 - .../testing/constraints-3.11.txt | 6 - .../testing/constraints-3.12.txt | 6 - .../testing/constraints-3.7.txt | 10 - .../testing/constraints-3.8.txt | 6 - .../testing/constraints-3.9.txt | 6 - .../firestore-bundle-py/tests/__init__.py | 16 - .../tests/unit/__init__.py | 16 - .../tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/bundle/__init__.py | 16 - 217 files changed, 8 insertions(+), 68933 deletions(-) delete mode 100644 owl-bot-staging/firestore/v1/.coveragerc delete mode 100644 owl-bot-staging/firestore/v1/.flake8 delete mode 100644 owl-bot-staging/firestore/v1/MANIFEST.in delete mode 100644 owl-bot-staging/firestore/v1/README.rst delete mode 100644 owl-bot-staging/firestore/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/firestore/v1/docs/conf.py delete mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst delete mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst delete mode 100644 owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst delete mode 100644 owl-bot-staging/firestore/v1/docs/index.rst delete mode 100644 
owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py delete mode 100644 
owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py delete mode 100644 owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py delete mode 100644 owl-bot-staging/firestore/v1/mypy.ini delete mode 100644 owl-bot-staging/firestore/v1/noxfile.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py delete mode 100644 
owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py delete mode 100644 
owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py delete mode 100644 owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json delete mode 100644 owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py delete mode 100644 owl-bot-staging/firestore/v1/setup.py delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/firestore/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/firestore/v1/tests/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py delete mode 100644 
owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py delete mode 100644 owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py delete mode 100644 owl-bot-staging/firestore_admin/v1/.coveragerc delete mode 100644 owl-bot-staging/firestore_admin/v1/.flake8 delete mode 100644 owl-bot-staging/firestore_admin/v1/MANIFEST.in delete mode 100644 owl-bot-staging/firestore_admin/v1/README.rst delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/conf.py delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst delete mode 100644 owl-bot-staging/firestore_admin/v1/docs/index.rst delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py delete mode 100644 
owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py delete mode 100644 owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py delete mode 100644 owl-bot-staging/firestore_admin/v1/mypy.ini delete mode 100644 owl-bot-staging/firestore_admin/v1/noxfile.py delete mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py delete mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py delete mode 100644 
owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py delete 
mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py delete mode 100644 owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json delete mode 100644 owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py delete mode 100644 owl-bot-staging/firestore_admin/v1/setup.py delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/firestore_admin/v1/tests/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py delete mode 100644 owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py delete mode 100644 
owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt delete mode 100644 
owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index 6126412360..d04c3abb30 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -2509,9 +2509,8 @@ async def create_backup_schedule( ) -> schedule.BackupSchedule: r"""Creates a backup schedule on a database. At most two backup schedules can be configured on a - database, one daily backup schedule with retention up to - 7 days and one weekly backup schedule with retention up - to 14 weeks. + database, one daily backup schedule and one weekly + backup schedule. .. 
code-block:: python diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index 187e2d391b..d544c706a4 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -2978,9 +2978,8 @@ def create_backup_schedule( ) -> schedule.BackupSchedule: r"""Creates a backup schedule on a database. At most two backup schedules can be configured on a - database, one daily backup schedule with retention up to - 7 days and one weekly backup schedule with retention up - to 14 weeks. + database, one daily backup schedule and one weekly + backup schedule. .. code-block:: python diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index f06ca83bd9..cb0e076df4 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -828,9 +828,8 @@ def create_backup_schedule( Creates a backup schedule on a database. At most two backup schedules can be configured on a - database, one daily backup schedule with retention up to - 7 days and one weekly backup schedule with retention up - to 14 weeks. + database, one daily backup schedule and one weekly + backup schedule. 
Returns: Callable[[~.CreateBackupScheduleRequest], diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index 78c18a043d..35710e628d 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -860,9 +860,8 @@ def create_backup_schedule( Creates a backup schedule on a database. At most two backup schedules can be configured on a - database, one daily backup schedule with retention up to - 7 days and one weekly backup schedule with retention up - to 14 weeks. + database, one daily backup schedule and one weekly + backup schedule. Returns: Callable[[~.CreateBackupScheduleRequest], diff --git a/owl-bot-staging/firestore/v1/.coveragerc b/owl-bot-staging/firestore/v1/.coveragerc deleted file mode 100644 index 4c355f6455..0000000000 --- a/owl-bot-staging/firestore/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/firestore/__init__.py - google/cloud/firestore/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/firestore/v1/.flake8 b/owl-bot-staging/firestore/v1/.flake8 deleted file mode 100644 index 29227d4cf4..0000000000 --- a/owl-bot-staging/firestore/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/firestore/v1/MANIFEST.in b/owl-bot-staging/firestore/v1/MANIFEST.in deleted file mode 100644 index f51407a0a0..0000000000 --- a/owl-bot-staging/firestore/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/firestore *.py -recursive-include google/cloud/firestore_v1 *.py diff --git a/owl-bot-staging/firestore/v1/README.rst b/owl-bot-staging/firestore/v1/README.rst deleted file mode 100644 index c132117e8d..0000000000 --- a/owl-bot-staging/firestore/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Firestore API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Firestore API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore/v1/docs/_static/custom.css b/owl-bot-staging/firestore/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b5..0000000000 --- a/owl-bot-staging/firestore/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/firestore/v1/docs/conf.py b/owl-bot-staging/firestore/v1/docs/conf.py deleted file mode 100644 index 7eae2df026..0000000000 --- a/owl-bot-staging/firestore/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-firestore documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. 
-project = u"google-cloud-firestore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = "google-cloud-firestore-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-firestore.tex", - u"google-cloud-firestore Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ( - root_doc, - "google-cloud-firestore", - u"Google Cloud Firestore Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-firestore", - u"google-cloud-firestore Documentation", - author, - "google-cloud-firestore", - "GAPIC library for Google Cloud Firestore API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst deleted file mode 100644 index c32652de6b..0000000000 --- a/owl-bot-staging/firestore/v1/docs/firestore_v1/firestore.rst +++ /dev/null @@ -1,10 +0,0 @@ -Firestore ---------------------------- - -.. automodule:: google.cloud.firestore_v1.services.firestore - :members: - :inherited-members: - -.. automodule:: google.cloud.firestore_v1.services.firestore.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst deleted file mode 100644 index f48b25d8cf..0000000000 --- a/owl-bot-staging/firestore/v1/docs/firestore_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Firestore v1 API -========================================== -.. 
toctree:: - :maxdepth: 2 - - firestore diff --git a/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst b/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst deleted file mode 100644 index 1cc2e75c73..0000000000 --- a/owl-bot-staging/firestore/v1/docs/firestore_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Firestore v1 API -======================================= - -.. automodule:: google.cloud.firestore_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/firestore/v1/docs/index.rst b/owl-bot-staging/firestore/v1/docs/index.rst deleted file mode 100644 index 3c5e2cb410..0000000000 --- a/owl-bot-staging/firestore/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - firestore_v1/services - firestore_v1/types diff --git a/owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz b/owl-bot-staging/firestore/v1/firestore-v1-py.tar.gz deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py deleted file mode 100644 index 447c27098e..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore/__init__.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.firestore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.firestore_v1.services.firestore.client import FirestoreClient -from google.cloud.firestore_v1.services.firestore.async_client import FirestoreAsyncClient - -from google.cloud.firestore_v1.types.aggregation_result import AggregationResult -from google.cloud.firestore_v1.types.bloom_filter import BitSequence -from google.cloud.firestore_v1.types.bloom_filter import BloomFilter -from google.cloud.firestore_v1.types.common import DocumentMask -from google.cloud.firestore_v1.types.common import Precondition -from google.cloud.firestore_v1.types.common import TransactionOptions -from google.cloud.firestore_v1.types.document import ArrayValue -from google.cloud.firestore_v1.types.document import Document -from google.cloud.firestore_v1.types.document import MapValue -from google.cloud.firestore_v1.types.document import Value -from google.cloud.firestore_v1.types.firestore import BatchGetDocumentsRequest -from google.cloud.firestore_v1.types.firestore import BatchGetDocumentsResponse -from google.cloud.firestore_v1.types.firestore import BatchWriteRequest -from google.cloud.firestore_v1.types.firestore import BatchWriteResponse -from google.cloud.firestore_v1.types.firestore import BeginTransactionRequest -from google.cloud.firestore_v1.types.firestore import BeginTransactionResponse -from google.cloud.firestore_v1.types.firestore import CommitRequest -from google.cloud.firestore_v1.types.firestore import CommitResponse -from google.cloud.firestore_v1.types.firestore import CreateDocumentRequest -from google.cloud.firestore_v1.types.firestore import DeleteDocumentRequest -from google.cloud.firestore_v1.types.firestore import GetDocumentRequest -from google.cloud.firestore_v1.types.firestore import ListCollectionIdsRequest -from google.cloud.firestore_v1.types.firestore import ListCollectionIdsResponse -from 
google.cloud.firestore_v1.types.firestore import ListDocumentsRequest -from google.cloud.firestore_v1.types.firestore import ListDocumentsResponse -from google.cloud.firestore_v1.types.firestore import ListenRequest -from google.cloud.firestore_v1.types.firestore import ListenResponse -from google.cloud.firestore_v1.types.firestore import PartitionQueryRequest -from google.cloud.firestore_v1.types.firestore import PartitionQueryResponse -from google.cloud.firestore_v1.types.firestore import RollbackRequest -from google.cloud.firestore_v1.types.firestore import RunAggregationQueryRequest -from google.cloud.firestore_v1.types.firestore import RunAggregationQueryResponse -from google.cloud.firestore_v1.types.firestore import RunQueryRequest -from google.cloud.firestore_v1.types.firestore import RunQueryResponse -from google.cloud.firestore_v1.types.firestore import Target -from google.cloud.firestore_v1.types.firestore import TargetChange -from google.cloud.firestore_v1.types.firestore import UpdateDocumentRequest -from google.cloud.firestore_v1.types.firestore import WriteRequest -from google.cloud.firestore_v1.types.firestore import WriteResponse -from google.cloud.firestore_v1.types.query import Cursor -from google.cloud.firestore_v1.types.query import StructuredAggregationQuery -from google.cloud.firestore_v1.types.query import StructuredQuery -from google.cloud.firestore_v1.types.query_profile import ExecutionStats -from google.cloud.firestore_v1.types.query_profile import ExplainMetrics -from google.cloud.firestore_v1.types.query_profile import ExplainOptions -from google.cloud.firestore_v1.types.query_profile import PlanSummary -from google.cloud.firestore_v1.types.write import DocumentChange -from google.cloud.firestore_v1.types.write import DocumentDelete -from google.cloud.firestore_v1.types.write import DocumentRemove -from google.cloud.firestore_v1.types.write import DocumentTransform -from google.cloud.firestore_v1.types.write import ExistenceFilter -from 
google.cloud.firestore_v1.types.write import Write -from google.cloud.firestore_v1.types.write import WriteResult - -__all__ = ('FirestoreClient', - 'FirestoreAsyncClient', - 'AggregationResult', - 'BitSequence', - 'BloomFilter', - 'DocumentMask', - 'Precondition', - 'TransactionOptions', - 'ArrayValue', - 'Document', - 'MapValue', - 'Value', - 'BatchGetDocumentsRequest', - 'BatchGetDocumentsResponse', - 'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'CommitRequest', - 'CommitResponse', - 'CreateDocumentRequest', - 'DeleteDocumentRequest', - 'GetDocumentRequest', - 'ListCollectionIdsRequest', - 'ListCollectionIdsResponse', - 'ListDocumentsRequest', - 'ListDocumentsResponse', - 'ListenRequest', - 'ListenResponse', - 'PartitionQueryRequest', - 'PartitionQueryResponse', - 'RollbackRequest', - 'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'RunQueryRequest', - 'RunQueryResponse', - 'Target', - 'TargetChange', - 'UpdateDocumentRequest', - 'WriteRequest', - 'WriteResponse', - 'Cursor', - 'StructuredAggregationQuery', - 'StructuredQuery', - 'ExecutionStats', - 'ExplainMetrics', - 'ExplainOptions', - 'PlanSummary', - 'DocumentChange', - 'DocumentDelete', - 'DocumentRemove', - 'DocumentTransform', - 'ExistenceFilter', - 'Write', - 'WriteResult', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py b/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py deleted file mode 100644 index 558c8aab67..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed b/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed deleted file mode 100644 index 35a48b3acc..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore package uses inline types. diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py deleted file mode 100644 index 63ce6226f5..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/__init__.py +++ /dev/null @@ -1,134 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.firestore_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.firestore import FirestoreClient -from .services.firestore import FirestoreAsyncClient - -from .types.aggregation_result import AggregationResult -from .types.bloom_filter import BitSequence -from .types.bloom_filter import BloomFilter -from .types.common import DocumentMask -from .types.common import Precondition -from .types.common import TransactionOptions -from .types.document import ArrayValue -from .types.document import Document -from .types.document import MapValue -from .types.document import Value -from .types.firestore import BatchGetDocumentsRequest -from .types.firestore import BatchGetDocumentsResponse -from .types.firestore import BatchWriteRequest -from .types.firestore import BatchWriteResponse -from .types.firestore import BeginTransactionRequest -from .types.firestore import BeginTransactionResponse -from .types.firestore import CommitRequest -from .types.firestore import CommitResponse -from .types.firestore import CreateDocumentRequest -from .types.firestore import DeleteDocumentRequest -from .types.firestore import GetDocumentRequest -from .types.firestore import ListCollectionIdsRequest -from .types.firestore import ListCollectionIdsResponse -from .types.firestore import ListDocumentsRequest -from .types.firestore import ListDocumentsResponse -from .types.firestore import ListenRequest -from .types.firestore import ListenResponse -from .types.firestore import PartitionQueryRequest -from .types.firestore import PartitionQueryResponse -from .types.firestore import RollbackRequest -from .types.firestore import RunAggregationQueryRequest -from .types.firestore import RunAggregationQueryResponse -from .types.firestore import RunQueryRequest -from .types.firestore import RunQueryResponse -from .types.firestore import Target -from .types.firestore import TargetChange -from .types.firestore import 
UpdateDocumentRequest -from .types.firestore import WriteRequest -from .types.firestore import WriteResponse -from .types.query import Cursor -from .types.query import StructuredAggregationQuery -from .types.query import StructuredQuery -from .types.query_profile import ExecutionStats -from .types.query_profile import ExplainMetrics -from .types.query_profile import ExplainOptions -from .types.query_profile import PlanSummary -from .types.write import DocumentChange -from .types.write import DocumentDelete -from .types.write import DocumentRemove -from .types.write import DocumentTransform -from .types.write import ExistenceFilter -from .types.write import Write -from .types.write import WriteResult - -__all__ = ( - 'FirestoreAsyncClient', -'AggregationResult', -'ArrayValue', -'BatchGetDocumentsRequest', -'BatchGetDocumentsResponse', -'BatchWriteRequest', -'BatchWriteResponse', -'BeginTransactionRequest', -'BeginTransactionResponse', -'BitSequence', -'BloomFilter', -'CommitRequest', -'CommitResponse', -'CreateDocumentRequest', -'Cursor', -'DeleteDocumentRequest', -'Document', -'DocumentChange', -'DocumentDelete', -'DocumentMask', -'DocumentRemove', -'DocumentTransform', -'ExecutionStats', -'ExistenceFilter', -'ExplainMetrics', -'ExplainOptions', -'FirestoreClient', -'GetDocumentRequest', -'ListCollectionIdsRequest', -'ListCollectionIdsResponse', -'ListDocumentsRequest', -'ListDocumentsResponse', -'ListenRequest', -'ListenResponse', -'MapValue', -'PartitionQueryRequest', -'PartitionQueryResponse', -'PlanSummary', -'Precondition', -'RollbackRequest', -'RunAggregationQueryRequest', -'RunAggregationQueryResponse', -'RunQueryRequest', -'RunQueryResponse', -'StructuredAggregationQuery', -'StructuredQuery', -'Target', -'TargetChange', -'TransactionOptions', -'UpdateDocumentRequest', -'Value', -'Write', -'WriteRequest', -'WriteResponse', -'WriteResult', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json 
b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json deleted file mode 100644 index d0462f9640..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_metadata.json +++ /dev/null @@ -1,268 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.firestore_v1", - "protoPackage": "google.firestore.v1", - "schema": "1.0", - "services": { - "Firestore": { - "clients": { - "grpc": { - "libraryClient": "FirestoreClient", - "rpcs": { - "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - "methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - }, - "grpc-async": { - "libraryClient": "FirestoreAsyncClient", - "rpcs": { - "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - 
"methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - }, - "rest": { - "libraryClient": "FirestoreClient", - "rpcs": { - "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - "methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py deleted file mode 100644 index 558c8aab67..0000000000 --- 
a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed deleted file mode 100644 index 35a48b3acc..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore package uses inline types. diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py deleted file mode 100644 index 8f6cf06824..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py deleted file mode 100644 index 3a2cdd9b1a..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import FirestoreClient -from .async_client import FirestoreAsyncClient - -__all__ = ( - 'FirestoreClient', - 'FirestoreAsyncClient', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py deleted file mode 100644 index 23e437047b..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/async_client.py +++ /dev/null @@ -1,2200 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union - -from google.cloud.firestore_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import 
status_pb2 # type: ignore -from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .client import FirestoreClient - - -class FirestoreAsyncClient: - """The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - """ - - _client: FirestoreClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = FirestoreClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FirestoreClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = FirestoreClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod(FirestoreClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(FirestoreClient.parse_common_billing_account_path) - common_folder_path = staticmethod(FirestoreClient.common_folder_path) - parse_common_folder_path = staticmethod(FirestoreClient.parse_common_folder_path) - common_organization_path = staticmethod(FirestoreClient.common_organization_path) - parse_common_organization_path = staticmethod(FirestoreClient.parse_common_organization_path) - common_project_path = staticmethod(FirestoreClient.common_project_path) - parse_common_project_path = staticmethod(FirestoreClient.parse_common_project_path) - common_location_path = staticmethod(FirestoreClient.common_location_path) - parse_common_location_path = staticmethod(FirestoreClient.parse_common_location_path) - - @classmethod - def 
from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAsyncClient: The constructed client. - """ - return FirestoreClient.from_service_account_info.__func__(FirestoreAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAsyncClient: The constructed client. - """ - return FirestoreClient.from_service_account_file.__func__(FirestoreAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return FirestoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> FirestoreTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = functools.partial(type(FirestoreClient).get_transport_class, type(FirestoreClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, FirestoreTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore async client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.FirestoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = FirestoreClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def get_document(self, - request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Gets a single document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = await client.get_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]]): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_document, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_documents(self, - request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDocumentsAsyncPager: - r"""Lists documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]]): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_documents, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - ("collection_id", request.collection_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDocumentsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_document(self, - request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, - *, - document: Optional[gf_document.Document] = None, - update_mask: Optional[common.DocumentMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = await client.update_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]]): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - document (:class:`google.cloud.firestore_v1.types.Document`): - Required. The updated document. - Creates the document if it does not - already exist. - - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.cloud.firestore_v1.types.DocumentMask`): - The fields to update. - None of the field paths in the mask may - contain a reserved name. - - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. - Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.UpdateDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_document, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("document.name", request.document.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_document(self, - request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a document. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - await client.delete_document(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]]): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - name (:class:`str`): - Required. The resource name of the Document to delete. - In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.DeleteDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_document, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def batch_get_documents(self, - request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[firestore.BatchGetDocumentsResponse]]: - r"""Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = await client.batch_get_documents(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]]): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_get_documents, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction(self, - request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]]): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def commit(self, - request: Optional[Union[firestore.CommitRequest, dict]] = None, - *, - database: Optional[str] = None, - writes: Optional[MutableSequence[gf_write.Write]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_commit(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.CommitRequest, dict]]): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (:class:`MutableSequence[google.cloud.firestore_v1.types.Write]`): - The writes to apply. - - Always executed atomically and in order. - - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database, writes]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if writes: - request.writes.extend(writes) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback(self, - request: Optional[Union[firestore.RollbackRequest, dict]] = None, - *, - database: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - await client.rollback(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RollbackRequest, dict]]): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - Required. The transaction to roll - back. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database, transaction]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_query(self, - request: Optional[Union[firestore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[firestore.RunQueryResponse]]: - r"""Runs a query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_query(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]]): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_query, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_aggregation_query(self, - request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[firestore.RunAggregationQueryResponse]]: - r"""Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_aggregation_query(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]]): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - request = firestore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_aggregation_query, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def partition_query(self, - request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.PartitionQueryAsyncPager: - r"""Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]]): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - request = firestore.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.partition_query, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.PartitionQueryAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def write(self, - requests: Optional[AsyncIterator[firestore.WriteRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[firestore.WriteResponse]]: - r"""Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.write(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - Args: - requests (AsyncIterator[`google.cloud.firestore_v1.types.WriteRequest`]): - The request object AsyncIterator. The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an - existing one from a token. - - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. - - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write, - default_timeout=86400.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def listen(self, - requests: Optional[AsyncIterator[firestore.ListenRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[firestore.ListenResponse]]: - r"""Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_listen(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.listen(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - Args: - requests (AsyncIterator[`google.cloud.firestore_v1.types.ListenRequest`]): - The request object AsyncIterator. A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.listen, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=86400.0, - ), - default_timeout=86400.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_collection_ids(self, - request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCollectionIdsAsyncPager: - r"""Lists all the collection IDs underneath a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]]): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - parent (:class:`str`): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore.ListCollectionIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_collection_ids, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCollectionIdsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_write(self, - request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BatchWriteResponse: - r"""Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = await client.batch_write(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]]): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - """ - # Create or coerce a protobuf request object. - request = firestore.BatchWriteRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_write, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_document(self, - request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Creates a new document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = await client.create_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]]): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_document, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - ("collection_id", request.collection_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "FirestoreAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FirestoreAsyncClient", -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py deleted file mode 100644 index 0d3cfdfb12..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/client.py +++ /dev/null @@ -1,2416 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.firestore_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import FirestoreGrpcTransport -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport -from 
.transports.rest import FirestoreRestTransport - - -class FirestoreClientMeta(type): - """Metaclass for the Firestore client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] - _transport_registry["grpc"] = FirestoreGrpcTransport - _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport - _transport_registry["rest"] = FirestoreRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[FirestoreTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class FirestoreClient(metaclass=FirestoreClientMeta): - """The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. 
- Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "firestore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> FirestoreTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a 
project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = FirestoreClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = FirestoreClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = FirestoreClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirestoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - FirestoreClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, FirestoreTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, FirestoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = FirestoreClient._read_environment_variables() - self._client_cert_source = FirestoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = FirestoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, FirestoreTransport) - if transport_provided: - # transport is a FirestoreTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(FirestoreTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - FirestoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def get_document(self, - request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Gets a single document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = client.get_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.GetDocumentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.GetDocumentRequest): - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_documents(self, - request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDocumentsPager: - r"""Lists documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.ListDocumentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.ListDocumentsRequest): - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - ("collection_id", request.collection_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDocumentsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_document(self, - request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, - *, - document: Optional[gf_document.Document] = None, - update_mask: Optional[common.DocumentMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = client.update_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - document (google.cloud.firestore_v1.types.Document): - Required. The updated document. - Creates the document if it does not - already exist. - - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update. - None of the field paths in the mask may - contain a reserved name. - - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. - Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. 
- - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.UpdateDocumentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.UpdateDocumentRequest): - request = firestore.UpdateDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("document.name", request.document.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_document(self, - request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a document. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - client.delete_document(request=request) - - Args: - request (Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - name (str): - Required. The resource name of the Document to delete. - In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.DeleteDocumentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.DeleteDocumentRequest): - request = firestore.DeleteDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def batch_get_documents(self, - request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.BatchGetDocumentsResponse]: - r"""Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = client.batch_get_documents(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.BatchGetDocumentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.BatchGetDocumentsRequest): - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_documents] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction(self, - request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.BeginTransactionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.BeginTransactionRequest): - request = firestore.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def commit(self, - request: Optional[Union[firestore.CommitRequest, dict]] = None, - *, - database: Optional[str] = None, - writes: Optional[MutableSequence[gf_write.Write]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_commit(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.CommitRequest, dict]): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, writes]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.CommitRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.CommitRequest): - request = firestore.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if writes is not None: - request.writes = writes - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def rollback(self, - request: Optional[Union[firestore.RollbackRequest, dict]] = None, - *, - database: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - client.rollback(request=request) - - Args: - request (Union[google.cloud.firestore_v1.types.RollbackRequest, dict]): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (bytes): - Required. The transaction to roll - back. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, transaction]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.RollbackRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.RollbackRequest): - request = firestore.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_query(self, - request: Optional[Union[firestore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.RunQueryResponse]: - r"""Runs a query. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_query(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.firestore_v1.types.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.RunQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.RunQueryRequest): - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.run_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_aggregation_query(self, - request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.RunAggregationQueryResponse]: - r"""Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_aggregation_query(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.RunAggregationQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.RunAggregationQueryRequest): - request = firestore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def partition_query(self, - request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.PartitionQueryPager: - r"""Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.PartitionQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.PartitionQueryRequest): - request = firestore.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.PartitionQueryPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def write(self, - requests: Optional[Iterator[firestore.WriteRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.WriteResponse]: - r"""Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.write(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - Args: - requests (Iterator[google.cloud.firestore_v1.types.WriteRequest]): - The request object iterator. The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an - existing one from a token. - - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. 
- - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.firestore_v1.types.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def listen(self, - requests: Optional[Iterator[firestore.ListenRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.ListenResponse]: - r"""Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_listen(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.listen(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - Args: - requests (Iterator[google.cloud.firestore_v1.types.ListenRequest]): - The request object iterator. A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.firestore_v1.types.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.listen] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_collection_ids(self, - request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCollectionIdsPager: - r"""Lists all the collection IDs underneath a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - parent (str): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore.ListCollectionIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.ListCollectionIdsRequest): - request = firestore.ListCollectionIdsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_collection_ids] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListCollectionIdsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write(self, - request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BatchWriteResponse: - r"""Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = client.batch_write(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore.BatchWriteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.BatchWriteRequest): - request = firestore.BatchWriteRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_document(self, - request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Creates a new document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = client.create_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a firestore.CreateDocumentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore.CreateDocumentRequest): - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - ("collection_id", request.collection_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "FirestoreClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FirestoreClient", -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py deleted file mode 100644 index 81b0d97435..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/pagers.py +++ /dev/null @@ -1,383 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query - - -class ListDocumentsPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., firestore.ListDocumentsResponse], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListDocumentsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListDocumentsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[document.Document]: - for page in self.pages: - yield from page.documents - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDocumentsAsyncPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[firestore.ListDocumentsResponse]], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListDocumentsRequest): - The initial request object. 
- response (google.cloud.firestore_v1.types.ListDocumentsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[document.Document]: - async def async_generator(): - async for page in self.pages: - for response in page.documents: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class PartitionQueryPager: - """A pager for iterating through ``partition_query`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and - provides an ``__iter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``PartitionQuery`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., firestore.PartitionQueryResponse], - request: firestore.PartitionQueryRequest, - response: firestore.PartitionQueryResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.PartitionQueryRequest): - The initial request object. - response (google.cloud.firestore_v1.types.PartitionQueryResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore.PartitionQueryRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.PartitionQueryResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[query.Cursor]: - for page in self.pages: - yield from page.partitions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class PartitionQueryAsyncPager: - """A pager for iterating through ``partition_query`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``PartitionQuery`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[firestore.PartitionQueryResponse]], - request: firestore.PartitionQueryRequest, - response: firestore.PartitionQueryResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.PartitionQueryRequest): - The initial request object. - response (google.cloud.firestore_v1.types.PartitionQueryResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore.PartitionQueryRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.PartitionQueryResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[query.Cursor]: - async def async_generator(): - async for page in self.pages: - for response in page.partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCollectionIdsPager: - """A pager for iterating through ``list_collection_ids`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``collection_ids`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListCollectionIds`` requests and continue to iterate - through the ``collection_ids`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., firestore.ListCollectionIdsResponse], - request: firestore.ListCollectionIdsRequest, - response: firestore.ListCollectionIdsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = firestore.ListCollectionIdsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.ListCollectionIdsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.collection_ids - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCollectionIdsAsyncPager: - """A pager for iterating through ``list_collection_ids`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``collection_ids`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListCollectionIds`` requests and continue to iterate - through the ``collection_ids`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[firestore.ListCollectionIdsResponse]], - request: firestore.ListCollectionIdsRequest, - response: firestore.ListCollectionIdsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): - The initial request object. 
- response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore.ListCollectionIdsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.ListCollectionIdsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.collection_ids: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py deleted file mode 100644 index f66168756b..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import FirestoreTransport -from .grpc import FirestoreGrpcTransport -from .grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .rest import FirestoreRestTransport -from .rest import FirestoreRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] -_transport_registry['grpc'] = FirestoreGrpcTransport -_transport_registry['grpc_asyncio'] = FirestoreGrpcAsyncIOTransport -_transport_registry['rest'] = FirestoreRestTransport - -__all__ = ( - 'FirestoreTransport', - 'FirestoreGrpcTransport', - 'FirestoreGrpcAsyncIOTransport', - 'FirestoreRestTransport', - 'FirestoreRestInterceptor', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py deleted file mode 100644 index 75dee60fd8..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/base.py +++ /dev/null @@ -1,532 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.firestore_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class FirestoreTransport(abc.ABC): - """Abstract transport class for Firestore.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', - ) - - DEFAULT_HOST: str = 'firestore.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_document: gapic_v1.method.wrap_method( - self.get_document, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_documents: gapic_v1.method.wrap_method( - self.list_documents, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_document: gapic_v1.method.wrap_method( - self.update_document, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_document: gapic_v1.method.wrap_method( - self.delete_document, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_documents: gapic_v1.method.wrap_method( - self.batch_get_documents, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.commit: gapic_v1.method.wrap_method( - self.commit, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.rollback: gapic_v1.method.wrap_method( - self.rollback, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_query: gapic_v1.method.wrap_method( - self.run_query, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.run_aggregation_query: gapic_v1.method.wrap_method( - self.run_aggregation_query, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.partition_query: gapic_v1.method.wrap_method( - self.partition_query, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.write: gapic_v1.method.wrap_method( - self.write, - default_timeout=86400.0, - client_info=client_info, - ), - self.listen: gapic_v1.method.wrap_method( - self.listen, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=86400.0, - ), - default_timeout=86400.0, - client_info=client_info, - ), - self.list_collection_ids: gapic_v1.method.wrap_method( - self.list_collection_ids, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, 
- ), - self.batch_write: gapic_v1.method.wrap_method( - self.batch_write, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_document: gapic_v1.method.wrap_method( - self.create_document, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_document(self) -> Callable[ - [firestore.GetDocumentRequest], - Union[ - document.Document, - Awaitable[document.Document] - ]]: - raise NotImplementedError() - - @property - def list_documents(self) -> Callable[ - [firestore.ListDocumentsRequest], - Union[ - firestore.ListDocumentsResponse, - Awaitable[firestore.ListDocumentsResponse] - ]]: - raise NotImplementedError() - - @property - def update_document(self) -> Callable[ - [firestore.UpdateDocumentRequest], - Union[ - gf_document.Document, - Awaitable[gf_document.Document] - ]]: - raise NotImplementedError() - - @property - def delete_document(self) -> Callable[ - [firestore.DeleteDocumentRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def batch_get_documents(self) -> Callable[ - [firestore.BatchGetDocumentsRequest], - Union[ - firestore.BatchGetDocumentsResponse, - Awaitable[firestore.BatchGetDocumentsResponse] - ]]: - raise NotImplementedError() - - @property - def 
begin_transaction(self) -> Callable[ - [firestore.BeginTransactionRequest], - Union[ - firestore.BeginTransactionResponse, - Awaitable[firestore.BeginTransactionResponse] - ]]: - raise NotImplementedError() - - @property - def commit(self) -> Callable[ - [firestore.CommitRequest], - Union[ - firestore.CommitResponse, - Awaitable[firestore.CommitResponse] - ]]: - raise NotImplementedError() - - @property - def rollback(self) -> Callable[ - [firestore.RollbackRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def run_query(self) -> Callable[ - [firestore.RunQueryRequest], - Union[ - firestore.RunQueryResponse, - Awaitable[firestore.RunQueryResponse] - ]]: - raise NotImplementedError() - - @property - def run_aggregation_query(self) -> Callable[ - [firestore.RunAggregationQueryRequest], - Union[ - firestore.RunAggregationQueryResponse, - Awaitable[firestore.RunAggregationQueryResponse] - ]]: - raise NotImplementedError() - - @property - def partition_query(self) -> Callable[ - [firestore.PartitionQueryRequest], - Union[ - firestore.PartitionQueryResponse, - Awaitable[firestore.PartitionQueryResponse] - ]]: - raise NotImplementedError() - - @property - def write(self) -> Callable[ - [firestore.WriteRequest], - Union[ - firestore.WriteResponse, - Awaitable[firestore.WriteResponse] - ]]: - raise NotImplementedError() - - @property - def listen(self) -> Callable[ - [firestore.ListenRequest], - Union[ - firestore.ListenResponse, - Awaitable[firestore.ListenResponse] - ]]: - raise NotImplementedError() - - @property - def list_collection_ids(self) -> Callable[ - [firestore.ListCollectionIdsRequest], - Union[ - firestore.ListCollectionIdsResponse, - Awaitable[firestore.ListCollectionIdsResponse] - ]]: - raise NotImplementedError() - - @property - def batch_write(self) -> Callable[ - [firestore.BatchWriteRequest], - Union[ - firestore.BatchWriteResponse, - Awaitable[firestore.BatchWriteResponse] - ]]: - raise 
NotImplementedError() - - @property - def create_document(self) -> Callable[ - [firestore.CreateDocumentRequest], - Union[ - document.Document, - Awaitable[document.Document] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'FirestoreTransport', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py deleted file mode 100644 index 61d093913b..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ /dev/null @@ -1,774 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO - - -class FirestoreGrpcTransport(FirestoreTransport): - """gRPC backend transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def get_document(self) -> Callable[ - [firestore.GetDocumentRequest], - document.Document]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_document' not in self._stubs: - self._stubs['get_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/GetDocument', - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs['get_document'] - - @property - def list_documents(self) -> Callable[ - [firestore.ListDocumentsRequest], - firestore.ListDocumentsResponse]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - ~.ListDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_documents' not in self._stubs: - self._stubs['list_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/ListDocuments', - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs['list_documents'] - - @property - def update_document(self) -> Callable[ - [firestore.UpdateDocumentRequest], - gf_document.Document]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_document' not in self._stubs: - self._stubs['update_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/UpdateDocument', - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs['update_document'] - - @property - def delete_document(self) -> Callable[ - [firestore.DeleteDocumentRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_document' not in self._stubs: - self._stubs['delete_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/DeleteDocument', - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_document'] - - @property - def batch_get_documents(self) -> Callable[ - [firestore.BatchGetDocumentsRequest], - firestore.BatchGetDocumentsResponse]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - ~.BatchGetDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'batch_get_documents' not in self._stubs: - self._stubs['batch_get_documents'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/BatchGetDocuments', - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs['batch_get_documents'] - - @property - def begin_transaction(self) -> Callable[ - [firestore.BeginTransactionRequest], - firestore.BeginTransactionResponse]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.BeginTransactionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/BeginTransaction', - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [firestore.CommitRequest], - firestore.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/Commit', - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [firestore.RollbackRequest], - empty_pb2.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/Rollback', - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def run_query(self) -> Callable[ - [firestore.RunQueryRequest], - firestore.RunQueryResponse]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - ~.RunQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_query' not in self._stubs: - self._stubs['run_query'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/RunQuery', - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs['run_query'] - - @property - def run_aggregation_query(self) -> Callable[ - [firestore.RunAggregationQueryRequest], - firestore.RunAggregationQueryResponse]: - r"""Return a callable for the run aggregation query method over gRPC. - - Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - Returns: - Callable[[~.RunAggregationQueryRequest], - ~.RunAggregationQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_aggregation_query' not in self._stubs: - self._stubs['run_aggregation_query'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/RunAggregationQuery', - request_serializer=firestore.RunAggregationQueryRequest.serialize, - response_deserializer=firestore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs['run_aggregation_query'] - - @property - def partition_query(self) -> Callable[ - [firestore.PartitionQueryRequest], - firestore.PartitionQueryResponse]: - r"""Return a callable for the partition query method over gRPC. 
- - Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - Returns: - Callable[[~.PartitionQueryRequest], - ~.PartitionQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/PartitionQuery', - request_serializer=firestore.PartitionQueryRequest.serialize, - response_deserializer=firestore.PartitionQueryResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def write(self) -> Callable[ - [firestore.WriteRequest], - firestore.WriteResponse]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - Returns: - Callable[[~.WriteRequest], - ~.WriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'write' not in self._stubs: - self._stubs['write'] = self.grpc_channel.stream_stream( - '/google.firestore.v1.Firestore/Write', - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs['write'] - - @property - def listen(self) -> Callable[ - [firestore.ListenRequest], - firestore.ListenResponse]: - r"""Return a callable for the listen method over gRPC. 
- - Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - Returns: - Callable[[~.ListenRequest], - ~.ListenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'listen' not in self._stubs: - self._stubs['listen'] = self.grpc_channel.stream_stream( - '/google.firestore.v1.Firestore/Listen', - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs['listen'] - - @property - def list_collection_ids(self) -> Callable[ - [firestore.ListCollectionIdsRequest], - firestore.ListCollectionIdsResponse]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - ~.ListCollectionIdsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_collection_ids' not in self._stubs: - self._stubs['list_collection_ids'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/ListCollectionIds', - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs['list_collection_ids'] - - @property - def batch_write(self) -> Callable[ - [firestore.BatchWriteRequest], - firestore.BatchWriteResponse]: - r"""Return a callable for the batch write method over gRPC. - - Applies a batch of write operations. 
- - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - Returns: - Callable[[~.BatchWriteRequest], - ~.BatchWriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/BatchWrite', - request_serializer=firestore.BatchWriteRequest.serialize, - response_deserializer=firestore.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - @property - def create_document(self) -> Callable[ - [firestore.CreateDocumentRequest], - document.Document]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_document' not in self._stubs: - self._stubs['create_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/CreateDocument', - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs['create_document'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'FirestoreGrpcTransport', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py deleted file mode 100644 index 64cef2eba2..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ /dev/null @@ -1,773 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .grpc import FirestoreGrpcTransport - - -class FirestoreGrpcAsyncIOTransport(FirestoreTransport): - """gRPC AsyncIO backend transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. 
The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
- return self._grpc_channel - - @property - def get_document(self) -> Callable[ - [firestore.GetDocumentRequest], - Awaitable[document.Document]]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_document' not in self._stubs: - self._stubs['get_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/GetDocument', - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs['get_document'] - - @property - def list_documents(self) -> Callable[ - [firestore.ListDocumentsRequest], - Awaitable[firestore.ListDocumentsResponse]]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - Awaitable[~.ListDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_documents' not in self._stubs: - self._stubs['list_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/ListDocuments', - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs['list_documents'] - - @property - def update_document(self) -> Callable[ - [firestore.UpdateDocumentRequest], - Awaitable[gf_document.Document]]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_document' not in self._stubs: - self._stubs['update_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/UpdateDocument', - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs['update_document'] - - @property - def delete_document(self) -> Callable[ - [firestore.DeleteDocumentRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_document' not in self._stubs: - self._stubs['delete_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/DeleteDocument', - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_document'] - - @property - def batch_get_documents(self) -> Callable[ - [firestore.BatchGetDocumentsRequest], - Awaitable[firestore.BatchGetDocumentsResponse]]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - Awaitable[~.BatchGetDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_get_documents' not in self._stubs: - self._stubs['batch_get_documents'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/BatchGetDocuments', - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs['batch_get_documents'] - - @property - def begin_transaction(self) -> Callable[ - [firestore.BeginTransactionRequest], - Awaitable[firestore.BeginTransactionResponse]]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.BeginTransactionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/BeginTransaction', - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [firestore.CommitRequest], - Awaitable[firestore.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/Commit', - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [firestore.RollbackRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/Rollback', - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def run_query(self) -> Callable[ - [firestore.RunQueryRequest], - Awaitable[firestore.RunQueryResponse]]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - Awaitable[~.RunQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_query' not in self._stubs: - self._stubs['run_query'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/RunQuery', - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs['run_query'] - - @property - def run_aggregation_query(self) -> Callable[ - [firestore.RunAggregationQueryRequest], - Awaitable[firestore.RunAggregationQueryResponse]]: - r"""Return a callable for the run aggregation query method over gRPC. - - Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. 
- SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - Returns: - Callable[[~.RunAggregationQueryRequest], - Awaitable[~.RunAggregationQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_aggregation_query' not in self._stubs: - self._stubs['run_aggregation_query'] = self.grpc_channel.unary_stream( - '/google.firestore.v1.Firestore/RunAggregationQuery', - request_serializer=firestore.RunAggregationQueryRequest.serialize, - response_deserializer=firestore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs['run_aggregation_query'] - - @property - def partition_query(self) -> Callable[ - [firestore.PartitionQueryRequest], - Awaitable[firestore.PartitionQueryResponse]]: - r"""Return a callable for the partition query method over gRPC. - - Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - Returns: - Callable[[~.PartitionQueryRequest], - Awaitable[~.PartitionQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/PartitionQuery', - request_serializer=firestore.PartitionQueryRequest.serialize, - response_deserializer=firestore.PartitionQueryResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def write(self) -> Callable[ - [firestore.WriteRequest], - Awaitable[firestore.WriteResponse]]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - Returns: - Callable[[~.WriteRequest], - Awaitable[~.WriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'write' not in self._stubs: - self._stubs['write'] = self.grpc_channel.stream_stream( - '/google.firestore.v1.Firestore/Write', - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs['write'] - - @property - def listen(self) -> Callable[ - [firestore.ListenRequest], - Awaitable[firestore.ListenResponse]]: - r"""Return a callable for the listen method over gRPC. - - Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - Returns: - Callable[[~.ListenRequest], - Awaitable[~.ListenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'listen' not in self._stubs: - self._stubs['listen'] = self.grpc_channel.stream_stream( - '/google.firestore.v1.Firestore/Listen', - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs['listen'] - - @property - def list_collection_ids(self) -> Callable[ - [firestore.ListCollectionIdsRequest], - Awaitable[firestore.ListCollectionIdsResponse]]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - Awaitable[~.ListCollectionIdsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_collection_ids' not in self._stubs: - self._stubs['list_collection_ids'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/ListCollectionIds', - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs['list_collection_ids'] - - @property - def batch_write(self) -> Callable[ - [firestore.BatchWriteRequest], - Awaitable[firestore.BatchWriteResponse]]: - r"""Return a callable for the batch write method over gRPC. - - Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. 
- - Returns: - Callable[[~.BatchWriteRequest], - Awaitable[~.BatchWriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/BatchWrite', - request_serializer=firestore.BatchWriteRequest.serialize, - response_deserializer=firestore.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - @property - def create_document(self) -> Callable[ - [firestore.CreateDocumentRequest], - Awaitable[document.Document]]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_document' not in self._stubs: - self._stubs['create_document'] = self.grpc_channel.unary_unary( - '/google.firestore.v1.Firestore/CreateDocument', - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs['create_document'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'FirestoreGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py deleted file mode 100644 index 55351bcc73..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ /dev/null @@ -1,2188 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class FirestoreRestInterceptor: - """Interceptor for Firestore. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the FirestoreRestTransport. - - .. code-block:: python - class MyCustomFirestoreInterceptor(FirestoreRestInterceptor): - def pre_batch_get_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_get_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_write(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_write(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_begin_transaction(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_begin_transaction(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_commit(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_commit(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_document(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_document(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_collection_ids(self, request, metadata): - 
logging.log(f"Received request: {request}") - return request, metadata - - def post_list_collection_ids(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rollback(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_run_aggregation_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_aggregation_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_document(self, response): - logging.log(f"Received response: {response}") - return response - - transport = FirestoreRestTransport(interceptor=MyCustomFirestoreInterceptor()) - client = FirestoreClient(transport=transport) - - - """ - def pre_batch_get_documents(self, request: firestore.BatchGetDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BatchGetDocumentsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for batch_get_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def post_batch_get_documents(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for batch_get_documents - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_batch_write(self, request: firestore.BatchWriteRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BatchWriteRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for batch_write - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_batch_write(self, response: firestore.BatchWriteResponse) -> firestore.BatchWriteResponse: - """Post-rpc interceptor for batch_write - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_begin_transaction(self, request: firestore.BeginTransactionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_begin_transaction(self, response: firestore.BeginTransactionResponse) -> firestore.BeginTransactionResponse: - """Post-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. 
- """ - return response - def pre_commit(self, request: firestore.CommitRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.CommitRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for commit - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_commit(self, response: firestore.CommitResponse) -> firestore.CommitResponse: - """Post-rpc interceptor for commit - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_create_document(self, request: firestore.CreateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.CreateDocumentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_create_document(self, response: document.Document) -> document.Document: - """Post-rpc interceptor for create_document - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_delete_document(self, request: firestore.DeleteDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.DeleteDocumentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def pre_get_document(self, request: firestore.GetDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.GetDocumentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_get_document(self, response: document.Document) -> document.Document: - """Post-rpc interceptor for get_document - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_list_collection_ids(self, request: firestore.ListCollectionIdsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.ListCollectionIdsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_collection_ids - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_list_collection_ids(self, response: firestore.ListCollectionIdsResponse) -> firestore.ListCollectionIdsResponse: - """Post-rpc interceptor for list_collection_ids - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_list_documents(self, request: firestore.ListDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.ListDocumentsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def post_list_documents(self, response: firestore.ListDocumentsResponse) -> firestore.ListDocumentsResponse: - """Post-rpc interceptor for list_documents - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_partition_query(self, request: firestore.PartitionQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.PartitionQueryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for partition_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_partition_query(self, response: firestore.PartitionQueryResponse) -> firestore.PartitionQueryResponse: - """Post-rpc interceptor for partition_query - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_rollback(self, request: firestore.RollbackRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RollbackRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for rollback - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def pre_run_aggregation_query(self, request: firestore.RunAggregationQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_aggregation_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def post_run_aggregation_query(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for run_aggregation_query - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_run_query(self, request: firestore.RunQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.RunQueryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_run_query(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for run_query - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_update_document(self, request: firestore.UpdateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore.UpdateDocumentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_update_document(self, response: gf_document.Document) -> gf_document.Document: - """Post-rpc interceptor for update_document - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. 
- """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class FirestoreRestStub: - _session: AuthorizedSession - _host: str - _interceptor: FirestoreRestInterceptor - - -class FirestoreRestTransport(FirestoreTransport): - """REST backend transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[FirestoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or FirestoreRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _BatchGetDocuments(FirestoreRestStub): - def __hash__(self): - return hash("BatchGetDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.BatchGetDocumentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the batch get documents method over HTTP. - - Args: - request (~.firestore.BatchGetDocumentsRequest): - The request object. 
The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.BatchGetDocumentsResponse: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/databases/*}/documents:batchGet', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_batch_get_documents(request, metadata) - pb_request = firestore.BatchGetDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, firestore.BatchGetDocumentsResponse) - resp = self._interceptor.post_batch_get_documents(resp) - return resp - - class _BatchWrite(FirestoreRestStub): - def __hash__(self): - return hash("BatchWrite") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.BatchWriteRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.BatchWriteResponse: - r"""Call the batch write method over HTTP. - - Args: - request (~.firestore.BatchWriteRequest): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/databases/*}/documents:batchWrite', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_batch_write(request, metadata) - pb_request = firestore.BatchWriteRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.BatchWriteResponse() - pb_resp = firestore.BatchWriteResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_write(resp) - return resp - - class _BeginTransaction(FirestoreRestStub): - def __hash__(self): - return hash("BeginTransaction") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.BeginTransactionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.BeginTransactionResponse: - r"""Call the begin transaction method over HTTP. - - Args: - request (~.firestore.BeginTransactionRequest): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/databases/*}/documents:beginTransaction', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_begin_transaction(request, metadata) - pb_request = firestore.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.BeginTransactionResponse() - pb_resp = firestore.BeginTransactionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_begin_transaction(resp) - return resp - - class _Commit(FirestoreRestStub): - def __hash__(self): - return hash("Commit") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.CommitRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.CommitResponse: - r"""Call the commit method over HTTP. - - Args: - request (~.firestore.CommitRequest): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/databases/*}/documents:commit', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_commit(request, metadata) - pb_request = firestore.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.CommitResponse() - pb_resp = firestore.CommitResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_commit(resp) - return resp - - class _CreateDocument(FirestoreRestStub): - def __hash__(self): - return hash("CreateDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.CreateDocumentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> document.Document: - r"""Call the create document method over HTTP. - - Args: - request (~.firestore.CreateDocumentRequest): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}', - 'body': 'document', - }, - ] - request, metadata = self._interceptor.pre_create_document(request, metadata) - pb_request = firestore.CreateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = document.Document() - pb_resp = document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_document(resp) - return resp - - class _DeleteDocument(FirestoreRestStub): - def __hash__(self): - return hash("DeleteDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.DeleteDocumentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete document method over HTTP. - - Args: - request (~.firestore.DeleteDocumentRequest): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/documents/*/**}', - }, - ] - request, metadata = self._interceptor.pre_delete_document(request, metadata) - pb_request = firestore.DeleteDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDocument(FirestoreRestStub): - def __hash__(self): - return hash("GetDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.GetDocumentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> document.Document: - r"""Call the get document method over HTTP. - - Args: - request (~.firestore.GetDocumentRequest): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/documents/*/**}', - }, - ] - request, metadata = self._interceptor.pre_get_document(request, metadata) - pb_request = firestore.GetDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = document.Document() - pb_resp = document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_document(resp) - return resp - - class _ListCollectionIds(FirestoreRestStub): - def __hash__(self): - return hash("ListCollectionIds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.ListCollectionIdsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.ListCollectionIdsResponse: - r"""Call the list collection ids method over HTTP. - - Args: - request (~.firestore.ListCollectionIdsRequest): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.ListCollectionIdsResponse: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents}:listCollectionIds', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_list_collection_ids(request, metadata) - pb_request = firestore.ListCollectionIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.ListCollectionIdsResponse() - pb_resp = firestore.ListCollectionIdsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_collection_ids(resp) - return resp - - class _ListDocuments(FirestoreRestStub): - def __hash__(self): - return hash("ListDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.ListDocumentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.ListDocumentsResponse: - r"""Call the list documents method over HTTP. - - Args: - request (~.firestore.ListDocumentsRequest): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.ListDocumentsResponse: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/databases/*/documents}/{collection_id}', - }, - ] - request, metadata = self._interceptor.pre_list_documents(request, metadata) - pb_request = firestore.ListDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.ListDocumentsResponse() - pb_resp = firestore.ListDocumentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_documents(resp) - return resp - - class _Listen(FirestoreRestStub): - def __hash__(self): - return hash("Listen") - - def __call__(self, - request: firestore.ListenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method Listen is not available over REST transport" - ) - class _PartitionQuery(FirestoreRestStub): - def __hash__(self): - return hash("PartitionQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.PartitionQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore.PartitionQueryResponse: - r"""Call the partition query method over HTTP. - - Args: - request (~.firestore.PartitionQueryRequest): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.PartitionQueryResponse: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents}:partitionQuery', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:partitionQuery', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_partition_query(request, metadata) - pb_request = firestore.PartitionQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.PartitionQueryResponse() - pb_resp = firestore.PartitionQueryResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_partition_query(resp) - return resp - - class _Rollback(FirestoreRestStub): - def __hash__(self): - return hash("Rollback") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.RollbackRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the rollback method over HTTP. - - Args: - request (~.firestore.RollbackRequest): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/databases/*}/documents:rollback', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_rollback(request, metadata) - pb_request = firestore.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _RunAggregationQuery(FirestoreRestStub): - def __hash__(self): - return hash("RunAggregationQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.RunAggregationQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the run aggregation query method over HTTP. 
- - Args: - request (~.firestore.RunAggregationQueryRequest): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.RunAggregationQueryResponse: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents}:runAggregationQuery', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:runAggregationQuery', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_aggregation_query(request, metadata) - pb_request = firestore.RunAggregationQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, firestore.RunAggregationQueryResponse) - resp = self._interceptor.post_run_aggregation_query(resp) - return resp - - class _RunQuery(FirestoreRestStub): - def __hash__(self): - return hash("RunQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.RunQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the run query method over HTTP. - - Args: - request (~.firestore.RunQueryRequest): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.RunQueryResponse: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents}:runQuery', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_query(request, metadata) - pb_request = firestore.RunQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, firestore.RunQueryResponse) - resp = self._interceptor.post_run_query(resp) - return resp - - class _UpdateDocument(FirestoreRestStub): - def __hash__(self): - return hash("UpdateDocument") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore.UpdateDocumentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> gf_document.Document: - r"""Call the update document method over HTTP. - - Args: - request (~.firestore.UpdateDocumentRequest): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.gf_document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{document.name=projects/*/databases/*/documents/*/**}', - 'body': 'document', - }, - ] - request, metadata = self._interceptor.pre_update_document(request, metadata) - pb_request = firestore.UpdateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gf_document.Document() - pb_resp = gf_document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_document(resp) - return resp - - class _Write(FirestoreRestStub): - def __hash__(self): - return hash("Write") - - def __call__(self, - request: firestore.WriteRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method Write is not available over REST transport" - ) - - @property - def batch_get_documents(self) -> Callable[ - [firestore.BatchGetDocumentsRequest], - firestore.BatchGetDocumentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchGetDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_write(self) -> Callable[ - [firestore.BatchWriteRequest], - firestore.BatchWriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore - - @property - def begin_transaction(self) -> Callable[ - [firestore.BeginTransactionRequest], - firestore.BeginTransactionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore - - @property - def commit(self) -> Callable[ - [firestore.CommitRequest], - firestore.CommitResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Commit(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_document(self) -> Callable[ - [firestore.CreateDocumentRequest], - document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_document(self) -> Callable[ - [firestore.DeleteDocumentRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_document(self) -> Callable[ - [firestore.GetDocumentRequest], - document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_collection_ids(self) -> Callable[ - [firestore.ListCollectionIdsRequest], - firestore.ListCollectionIdsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListCollectionIds(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_documents(self) -> Callable[ - [firestore.ListDocumentsRequest], - firestore.ListDocumentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def listen(self) -> Callable[ - [firestore.ListenRequest], - firestore.ListenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Listen(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_query(self) -> Callable[ - [firestore.PartitionQueryRequest], - firestore.PartitionQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def rollback(self) -> Callable[ - [firestore.RollbackRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Rollback(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_aggregation_query(self) -> Callable[ - [firestore.RunAggregationQueryRequest], - firestore.RunAggregationQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._RunAggregationQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_query(self) -> Callable[ - [firestore.RunQueryRequest], - firestore.RunQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_document(self) -> Callable[ - [firestore.UpdateDocumentRequest], - gf_document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def write(self) -> Callable[ - [firestore.WriteRequest], - firestore.WriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Write(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(FirestoreRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(FirestoreRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(FirestoreRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(FirestoreRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*}/operations', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'FirestoreRestTransport', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py deleted file mode 100644 index 772ccce027..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/__init__.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .aggregation_result import ( - AggregationResult, -) -from .bloom_filter import ( - BitSequence, - BloomFilter, -) -from .common import ( - DocumentMask, - Precondition, - TransactionOptions, -) -from .document import ( - ArrayValue, - Document, - MapValue, - Value, -) -from .firestore import ( - BatchGetDocumentsRequest, - BatchGetDocumentsResponse, - BatchWriteRequest, - BatchWriteResponse, - BeginTransactionRequest, - BeginTransactionResponse, - CommitRequest, - CommitResponse, - CreateDocumentRequest, - DeleteDocumentRequest, - GetDocumentRequest, - ListCollectionIdsRequest, - ListCollectionIdsResponse, - ListDocumentsRequest, - ListDocumentsResponse, - ListenRequest, - ListenResponse, - PartitionQueryRequest, - PartitionQueryResponse, - RollbackRequest, - RunAggregationQueryRequest, - RunAggregationQueryResponse, - RunQueryRequest, - RunQueryResponse, - Target, - TargetChange, - UpdateDocumentRequest, - WriteRequest, - WriteResponse, -) -from .query import ( - Cursor, - StructuredAggregationQuery, - StructuredQuery, -) -from .query_profile import ( - ExecutionStats, - ExplainMetrics, - ExplainOptions, - PlanSummary, -) -from .write import ( - DocumentChange, - DocumentDelete, - DocumentRemove, - DocumentTransform, - ExistenceFilter, - Write, - WriteResult, -) - -__all__ = ( - 'AggregationResult', - 'BitSequence', - 'BloomFilter', - 'DocumentMask', - 'Precondition', - 'TransactionOptions', - 'ArrayValue', - 'Document', - 'MapValue', - 'Value', - 'BatchGetDocumentsRequest', - 'BatchGetDocumentsResponse', - 'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'CommitRequest', - 'CommitResponse', - 'CreateDocumentRequest', - 'DeleteDocumentRequest', - 'GetDocumentRequest', - 'ListCollectionIdsRequest', - 'ListCollectionIdsResponse', - 'ListDocumentsRequest', - 'ListDocumentsResponse', - 'ListenRequest', - 'ListenResponse', - 'PartitionQueryRequest', - 'PartitionQueryResponse', - 'RollbackRequest', - 
'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'RunQueryRequest', - 'RunQueryResponse', - 'Target', - 'TargetChange', - 'UpdateDocumentRequest', - 'WriteRequest', - 'WriteResponse', - 'Cursor', - 'StructuredAggregationQuery', - 'StructuredQuery', - 'ExecutionStats', - 'ExplainMetrics', - 'ExplainOptions', - 'PlanSummary', - 'DocumentChange', - 'DocumentDelete', - 'DocumentRemove', - 'DocumentTransform', - 'ExistenceFilter', - 'Write', - 'WriteResult', -) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py deleted file mode 100644 index fac23cb061..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/aggregation_result.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'AggregationResult', - }, -) - - -class AggregationResult(proto.Message): - r"""The result of a single bucket from a Firestore aggregation query. 
- - The keys of ``aggregate_fields`` are the same for all results in an - aggregation query, unlike document queries which can have different - fields present for each result. - - Attributes: - aggregate_fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The result of the aggregation functions, ex: - ``COUNT(*) AS total_docs``. - - The key is the - [alias][google.firestore.v1.StructuredAggregationQuery.Aggregation.alias] - assigned to the aggregation function on input and the size - of this map equals the number of aggregation functions in - the query. - """ - - aggregate_fields: MutableMapping[str, document.Value] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=document.Value, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py deleted file mode 100644 index 592c1d6aa9..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/bloom_filter.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'BitSequence', - 'BloomFilter', - }, -) - - -class BitSequence(proto.Message): - r"""A sequence of bits, encoded in a byte array. - - Each byte in the ``bitmap`` byte array stores 8 bits of the - sequence. The only exception is the last byte, which may store 8 *or - fewer* bits. The ``padding`` defines the number of bits of the last - byte to be ignored as "padding". The values of these "padding" bits - are unspecified and must be ignored. - - To retrieve the first bit, bit 0, calculate: - ``(bitmap[0] & 0x01) != 0``. To retrieve the second bit, bit 1, - calculate: ``(bitmap[0] & 0x02) != 0``. To retrieve the third bit, - bit 2, calculate: ``(bitmap[0] & 0x04) != 0``. To retrieve the - fourth bit, bit 3, calculate: ``(bitmap[0] & 0x08) != 0``. To - retrieve bit n, calculate: - ``(bitmap[n / 8] & (0x01 << (n % 8))) != 0``. - - The "size" of a ``BitSequence`` (the number of bits it contains) is - calculated by this formula: ``(bitmap.length * 8) - padding``. - - Attributes: - bitmap (bytes): - The bytes that encode the bit sequence. - May have a length of zero. - padding (int): - The number of bits of the last byte in ``bitmap`` to ignore - as "padding". If the length of ``bitmap`` is zero, then this - value must be ``0``. Otherwise, this value must be between 0 - and 7, inclusive. - """ - - bitmap: bytes = proto.Field( - proto.BYTES, - number=1, - ) - padding: int = proto.Field( - proto.INT32, - number=2, - ) - - -class BloomFilter(proto.Message): - r"""A bloom filter (https://en.wikipedia.org/wiki/Bloom_filter). - - The bloom filter hashes the entries with MD5 and treats the - resulting 128-bit hash as 2 distinct 64-bit hash values, interpreted - as unsigned integers using 2's complement encoding. 
- - These two hash values, named ``h1`` and ``h2``, are then used to - compute the ``hash_count`` hash values using the formula, starting - at ``i=0``: - - :: - - h(i) = h1 + (i * h2) - - These resulting values are then taken modulo the number of bits in - the bloom filter to get the bits of the bloom filter to test for the - given entry. - - Attributes: - bits (google.cloud.firestore_v1.types.BitSequence): - The bloom filter data. - hash_count (int): - The number of hashes used by the algorithm. - """ - - bits: 'BitSequence' = proto.Field( - proto.MESSAGE, - number=1, - message='BitSequence', - ) - hash_count: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py deleted file mode 100644 index 674bc6515e..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/common.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'DocumentMask', - 'Precondition', - 'TransactionOptions', - }, -) - - -class DocumentMask(proto.Message): - r"""A set of field paths on a document. Used to restrict a get or update - operation on a document to a subset of its fields. This is different - from standard field masks, as this is always scoped to a - [Document][google.firestore.v1.Document], and takes in account the - dynamic nature of [Value][google.firestore.v1.Value]. - - Attributes: - field_paths (MutableSequence[str]): - The list of field paths in the mask. See - [Document.fields][google.firestore.v1.Document.fields] for a - field path syntax reference. - """ - - field_paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class Precondition(proto.Message): - r"""A precondition on a document, used for conditional - operations. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - exists (bool): - When set to ``true``, the target document must exist. When - set to ``false``, the target document must not exist. - - This field is a member of `oneof`_ ``condition_type``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - When set, the target document must exist and - have been last updated at that time. Timestamp - must be microsecond aligned. - - This field is a member of `oneof`_ ``condition_type``. 
- """ - - exists: bool = proto.Field( - proto.BOOL, - number=1, - oneof='condition_type', - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='condition_type', - message=timestamp_pb2.Timestamp, - ) - - -class TransactionOptions(proto.Message): - r"""Options for creating a new transaction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_only (google.cloud.firestore_v1.types.TransactionOptions.ReadOnly): - The transaction can only be used for read - operations. - - This field is a member of `oneof`_ ``mode``. - read_write (google.cloud.firestore_v1.types.TransactionOptions.ReadWrite): - The transaction can be used for both read and - write operations. - - This field is a member of `oneof`_ ``mode``. - """ - - class ReadWrite(proto.Message): - r"""Options for a transaction that can be used to read and write - documents. - Firestore does not allow 3rd party auth requests to create - read-write. transactions. - - Attributes: - retry_transaction (bytes): - An optional transaction to retry. - """ - - retry_transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - class ReadOnly(proto.Message): - r"""Options for a transaction that can only be used to read - documents. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents at the given time. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. 
- - This field is a member of `oneof`_ ``consistency_selector``. - """ - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - - read_only: ReadOnly = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=ReadOnly, - ) - read_write: ReadWrite = proto.Field( - proto.MESSAGE, - number=3, - oneof='mode', - message=ReadWrite, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py deleted file mode 100644 index a60b6609f7..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/document.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'Document', - 'Value', - 'ArrayValue', - 'MapValue', - }, -) - - -class Document(proto.Message): - r"""A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - Attributes: - name (str): - The resource name of the document, for example - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The document's fields. - - The map keys represent field names. - - Field names matching the regular expression ``__.*__`` are - reserved. Reserved field names are forbidden except in - certain documented contexts. The field names, represented as - UTF-8, must not exceed 1,500 bytes and cannot be empty. - - Field paths may be used in other contexts to refer to - structured fields defined here. For ``map_value``, the field - path is represented by a dot-delimited (``.``) string of - segments. Each segment is either a simple field name - (defined below) or a quoted field name. For example, the - structured field - ``"foo" : { map_value: { "x&y" : { string_value: "hello" }}}`` - would be represented by the field path - :literal:`foo.`x&y\``. - - A simple field name contains only characters ``a`` to ``z``, - ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start - with ``0`` to ``9``. For example, ``foo_bar_17``. - - A quoted field name starts and ends with :literal:`\`` and - may contain any character. Some characters, including - :literal:`\``, must be escaped using a ``\``. For example, - :literal:`\`x&y\`` represents ``x&y`` and - :literal:`\`bak\`tik\`` represents :literal:`bak`tik`. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time at which the document was created. - - This value increases monotonically when a document is - deleted then recreated. It can also be compared to values - from other documents and the ``read_time`` of a query. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time at which the document was last - changed. - - This value is initially set to the ``create_time`` then - increases monotonically with each change to the document. 
It - can also be compared to values from other documents and the - ``read_time`` of a query. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - fields: MutableMapping[str, 'Value'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message='Value', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class Value(proto.Message): - r"""A message that can hold any of the supported value types. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - null_value (google.protobuf.struct_pb2.NullValue): - A null value. - - This field is a member of `oneof`_ ``value_type``. - boolean_value (bool): - A boolean value. - - This field is a member of `oneof`_ ``value_type``. - integer_value (int): - An integer value. - - This field is a member of `oneof`_ ``value_type``. - double_value (float): - A double value. - - This field is a member of `oneof`_ ``value_type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - A timestamp value. - - Precise only to microseconds. When stored, any - additional precision is rounded down. - - This field is a member of `oneof`_ ``value_type``. - string_value (str): - A string value. - - The string, represented as UTF-8, must not - exceed 1 MiB - 89 bytes. Only the first 1,500 - bytes of the UTF-8 representation are considered - by queries. - - This field is a member of `oneof`_ ``value_type``. - bytes_value (bytes): - A bytes value. - - Must not exceed 1 MiB - 89 bytes. 
- Only the first 1,500 bytes are considered by - queries. - - This field is a member of `oneof`_ ``value_type``. - reference_value (str): - A reference to a document. For example: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``value_type``. - geo_point_value (google.type.latlng_pb2.LatLng): - A geo point value representing a point on the - surface of Earth. - - This field is a member of `oneof`_ ``value_type``. - array_value (google.cloud.firestore_v1.types.ArrayValue): - An array value. - - Cannot directly contain another array value, - though can contain an map which contains another - array. - - This field is a member of `oneof`_ ``value_type``. - map_value (google.cloud.firestore_v1.types.MapValue): - A map value. - - This field is a member of `oneof`_ ``value_type``. - """ - - null_value: struct_pb2.NullValue = proto.Field( - proto.ENUM, - number=11, - oneof='value_type', - enum=struct_pb2.NullValue, - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=1, - oneof='value_type', - ) - integer_value: int = proto.Field( - proto.INT64, - number=2, - oneof='value_type', - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=3, - oneof='value_type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof='value_type', - message=timestamp_pb2.Timestamp, - ) - string_value: str = proto.Field( - proto.STRING, - number=17, - oneof='value_type', - ) - bytes_value: bytes = proto.Field( - proto.BYTES, - number=18, - oneof='value_type', - ) - reference_value: str = proto.Field( - proto.STRING, - number=5, - oneof='value_type', - ) - geo_point_value: latlng_pb2.LatLng = proto.Field( - proto.MESSAGE, - number=8, - oneof='value_type', - message=latlng_pb2.LatLng, - ) - array_value: 'ArrayValue' = proto.Field( - proto.MESSAGE, - number=9, - oneof='value_type', - message='ArrayValue', - ) - map_value: 'MapValue' = proto.Field( - proto.MESSAGE, 
- number=6, - oneof='value_type', - message='MapValue', - ) - - -class ArrayValue(proto.Message): - r"""An array value. - - Attributes: - values (MutableSequence[google.cloud.firestore_v1.types.Value]): - Values in the array. - """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class MapValue(proto.Message): - r"""A map value. - - Attributes: - fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The map's fields. - - The map keys represent field names. Field names matching the - regular expression ``__.*__`` are reserved. Reserved field - names are forbidden except in certain documented contexts. - The map keys, represented as UTF-8, must not exceed 1,500 - bytes and cannot be empty. - """ - - fields: MutableMapping[str, 'Value'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='Value', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py deleted file mode 100644 index 898b12f7b4..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/firestore.py +++ /dev/null @@ -1,1758 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import query as gf_query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'GetDocumentRequest', - 'ListDocumentsRequest', - 'ListDocumentsResponse', - 'CreateDocumentRequest', - 'UpdateDocumentRequest', - 'DeleteDocumentRequest', - 'BatchGetDocumentsRequest', - 'BatchGetDocumentsResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'CommitRequest', - 'CommitResponse', - 'RollbackRequest', - 'RunQueryRequest', - 'RunQueryResponse', - 'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'PartitionQueryRequest', - 'PartitionQueryResponse', - 'WriteRequest', - 'WriteResponse', - 'ListenRequest', - 'ListenResponse', - 'Target', - 'TargetChange', - 'ListCollectionIdsRequest', - 'ListCollectionIdsResponse', - 'BatchWriteRequest', - 'BatchWriteResponse', - }, -) - - -class GetDocumentRequest(proto.Message): - r"""The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required. The resource name of the Document to get. 
In the - format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - transaction (bytes): - Reads the document in a transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads the version of the document at the - given time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=2, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - oneof='consistency_selector', - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - - -class ListDocumentsRequest(proto.Message): - r"""The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
- - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - collection_id (str): - Optional. The collection ID, relative to ``parent``, to - list. - - For example: ``chatrooms`` or ``messages``. - - This is optional, and when not provided, Firestore will list - documents from all collections under the provided - ``parent``. - page_size (int): - Optional. The maximum number of documents to - return in a single response. - Firestore may return fewer than this value. - page_token (str): - Optional. A page token, received from a previous - ``ListDocuments`` response. - - Provide this to retrieve the subsequent page. When - paginating, all other parameters (with the exception of - ``page_size``) must match the values set in the request that - generated the page token. - order_by (str): - Optional. The optional ordering of the documents to return. - - For example: ``priority desc, __name__ desc``. - - This mirrors the - [``ORDER BY``][google.firestore.v1.StructuredQuery.order_by] - used in Firestore queries but in a string representation. - When absent, documents are ordered based on - ``__name__ ASC``. - mask (google.cloud.firestore_v1.types.DocumentMask): - Optional. The fields to return. If not set, - returns all fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Perform the read as part of an already active - transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Perform the read at the provided time. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. 
- show_missing (bool): - If the list should show missing documents. - - A document is missing if it does not exist, but there are - sub-documents nested underneath it. When true, such missing - documents will be returned with a key but will not have - fields, - [``create_time``][google.firestore.v1.Document.create_time], - or - [``update_time``][google.firestore.v1.Document.update_time] - set. - - Requests with ``show_missing`` may not specify ``where`` or - ``order_by``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=7, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=8, - oneof='consistency_selector', - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - show_missing: bool = proto.Field( - proto.BOOL, - number=12, - ) - - -class ListDocumentsResponse(proto.Message): - r"""The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - Attributes: - documents (MutableSequence[google.cloud.firestore_v1.types.Document]): - The Documents found. - next_page_token (str): - A token to retrieve the next page of - documents. - If this field is omitted, there are no - subsequent pages. 
- """ - - @property - def raw_page(self): - return self - - documents: MutableSequence[gf_document.Document] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDocumentRequest(proto.Message): - r"""The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - - Attributes: - parent (str): - Required. The parent resource. For example: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`` - collection_id (str): - Required. The collection ID, relative to ``parent``, to - list. For example: ``chatrooms``. - document_id (str): - The client-assigned document ID to use for - this document. - Optional. If not specified, an ID will be - assigned by the service. - document (google.cloud.firestore_v1.types.Document): - Required. The document to create. ``name`` must not be set. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - document_id: str = proto.Field( - proto.STRING, - number=3, - ) - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=4, - message=gf_document.Document, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=5, - message=common.DocumentMask, - ) - - -class UpdateDocumentRequest(proto.Message): - r"""The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - - Attributes: - document (google.cloud.firestore_v1.types.Document): - Required. The updated document. - Creates the document if it does not already - exist. 
- update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update. - None of the field paths in the mask may contain - a reserved name. - - If the document exists on the server and has - fields not referenced in the mask, they are left - unchanged. - Fields referenced in the mask, but not present - in the input document, are deleted from the - document on the server. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. - """ - - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - update_mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=2, - message=common.DocumentMask, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=4, - message=common.Precondition, - ) - - -class DeleteDocumentRequest(proto.Message): - r"""The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - - Attributes: - name (str): - Required. The resource name of the Document to delete. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=2, - message=common.Precondition, - ) - - -class BatchGetDocumentsRequest(proto.Message): - r"""The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - documents (MutableSequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Reads documents in a transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. 
- This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - documents: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=4, - oneof='consistency_selector', - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=5, - oneof='consistency_selector', - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - - -class BatchGetDocumentsResponse(proto.Message): - r"""The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - found (google.cloud.firestore_v1.types.Document): - A document that was requested. - - This field is a member of `oneof`_ ``result``. - missing (str): - A document name that was requested but does not exist. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``result``. - transaction (bytes): - The transaction that was started as part of this request. 
- Will only be set in the first response, and only if - [BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction] - was set in the request. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the document was read. This may be - monotically increasing, in this case the previous documents - in the result stream are guaranteed not to have changed - between their read_time and this one. - """ - - found: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - oneof='result', - message=gf_document.Document, - ) - missing: str = proto.Field( - proto.STRING, - number=2, - oneof='result', - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - options (google.cloud.firestore_v1.types.TransactionOptions): - The options for the transaction. - Defaults to a read-write transaction. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - options: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=2, - message=common.TransactionOptions, - ) - - -class BeginTransactionResponse(proto.Message): - r"""The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - Attributes: - transaction (bytes): - The transaction that was started. - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class CommitRequest(proto.Message): - r"""The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - Attributes: - database (str): - Required. The database name. 
In the format: - ``projects/{project_id}/databases/{database_id}``. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - transaction (bytes): - If set, applies all writes in this - transaction, and commits it. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=write.Write, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - ) - - -class CommitResponse(proto.Message): - r"""The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - Attributes: - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - commit_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the commit occurred. Any read with an - equal or greater ``read_time`` is guaranteed to see the - effects of the commit. - """ - - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=write.WriteResult, - ) - commit_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class RollbackRequest(proto.Message): - r"""The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - transaction (bytes): - Required. The transaction to roll back. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class RunQueryRequest(proto.Message): - r"""The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. - transaction (bytes): - Run the query within an already active - transaction. - The value here is the opaque transaction ID to - execute the query in. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - explain_options (google.cloud.firestore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof='query_type', - message=gf_query.StructuredQuery, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - oneof='consistency_selector', - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=6, - oneof='consistency_selector', - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=10, - message=query_profile.ExplainOptions, - ) - - -class RunQueryResponse(proto.Message): - r"""The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transaction (bytes): - The transaction that was started as part of this request. - Can only be set in the first response, and only if - [RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction] - was set in the request. If set, no other fields will be set - in this response. - document (google.cloud.firestore_v1.types.Document): - A query result, not set when reporting - partial progress. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the document was read. This may be - monotonically increasing; in this case, the previous - documents in the result stream are guaranteed not to have - changed between their ``read_time`` and this one. - - If the query returns no results, a response with - ``read_time`` and no ``document`` will be sent, and this - represents the time at which the query was run. 
- skipped_results (int): - The number of results that have been skipped - due to an offset between the last response and - the current response. - done (bool): - If present, Firestore has completely finished - the request and no more documents will be - returned. - - This field is a member of `oneof`_ ``continuation_selector``. - explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): - Query explain metrics. This is only present when the - [RunQueryRequest.explain_options][google.firestore.v1.RunQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - skipped_results: int = proto.Field( - proto.INT32, - number=4, - ) - done: bool = proto.Field( - proto.BOOL, - number=6, - oneof='continuation_selector', - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=11, - message=query_profile.ExplainMetrics, - ) - - -class RunAggregationQueryRequest(proto.Message): - r"""The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
- For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_aggregation_query (google.cloud.firestore_v1.types.StructuredAggregationQuery): - An aggregation query. - - This field is a member of `oneof`_ ``query_type``. - transaction (bytes): - Run the aggregation within an already active - transaction. - The value here is the opaque transaction ID to - execute the query in. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction as part of the - query, defaulting to read-only. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Executes the query at the given timestamp. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - explain_options (google.cloud.firestore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_aggregation_query: gf_query.StructuredAggregationQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof='query_type', - message=gf_query.StructuredAggregationQuery, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=4, - oneof='consistency_selector', - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=5, - oneof='consistency_selector', - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=8, - message=query_profile.ExplainOptions, - ) - - -class RunAggregationQueryResponse(proto.Message): - r"""The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - Attributes: - result (google.cloud.firestore_v1.types.AggregationResult): - A single aggregation result. - - Not present when reporting partial progress. - transaction (bytes): - The transaction that was started as part of - this request. - Only present on the first response when the - request requested to start a new transaction. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the aggregate result was computed. This is - always monotonically increasing; in this case, the previous - AggregationResult in the result stream are guaranteed not to - have changed between their ``read_time`` and this one. - - If the query returns no results, a response with - ``read_time`` and no ``result`` will be sent, and this - represents the time at which the query was run. - explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): - Query explain metrics. 
This is only present when the - [RunAggregationQueryRequest.explain_options][google.firestore.v1.RunAggregationQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - result: aggregation_result.AggregationResult = proto.Field( - proto.MESSAGE, - number=1, - message=aggregation_result.AggregationResult, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=10, - message=query_profile.ExplainMetrics, - ) - - -class PartitionQueryRequest(proto.Message): - r"""The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents``. - Document resource names are not supported; only database - resource names can be specified. - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - Query must specify collection with all - descendants and be ordered by name ascending. - Other filters, order bys, limits, offsets, and - start/end cursors are not supported. - - This field is a member of `oneof`_ ``query_type``. - partition_count (int): - The desired maximum number of partition - points. The partitions may be returned across - multiple pages of results. The number must be - positive. The actual number of partitions - returned may be fewer. - - For example, this may be set to one fewer than - the number of parallel queries to be run, or in - running a data pipeline job, one fewer than the - number of workers or compute instances - available. 
- page_token (str): - The ``next_page_token`` value returned from a previous call - to PartitionQuery that may be used to get an additional set - of results. There are no ordering guarantees between sets of - results. Thus, using multiple sets of results will require - merging the different result sets. - - For example, two subsequent calls using a page_token may - return: - - - cursor B, cursor M, cursor Q - - cursor A, cursor U, cursor W - - To obtain a complete result set ordered with respect to the - results of the query supplied to PartitionQuery, the results - sets should be merged: cursor A, cursor B, cursor M, cursor - Q, cursor U, cursor W - page_size (int): - The maximum number of partitions to return in this call, - subject to ``partition_count``. - - For example, if ``partition_count`` = 10 and ``page_size`` = - 8, the first call to PartitionQuery will return up to 8 - partitions and a ``next_page_token`` if more results exist. - A second call to PartitionQuery will return up to 2 - partitions, to complete the total of 10 specified in - ``partition_count``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof='query_type', - message=gf_query.StructuredQuery, - ) - partition_count: int = proto.Field( - proto.INT64, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - - -class PartitionQueryResponse(proto.Message): - r"""The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Attributes: - partitions (MutableSequence[google.cloud.firestore_v1.types.Cursor]): - Partition results. Each partition is a split point that can - be used by RunQuery as a starting or end point for the query - results. The RunQuery requests must be made with the same - query supplied to this PartitionQuery request. The partition - cursors will be ordered according to same ordering as the - results of the query supplied to PartitionQuery. - - For example, if a PartitionQuery request returns partition - cursors A and B, running the following three queries will - return the entire result set of the original query: - - - query, end_at A - - query, start_at A, end_at B - - query, start_at B - - An empty result may indicate that the query has too few - results to be partitioned, or that the query is not yet - supported for partitioning. - next_page_token (str): - A page token that may be used to request an additional set - of results, up to the number specified by - ``partition_count`` in the PartitionQuery request. If blank, - there are no more results. 
- """ - - @property - def raw_page(self): - return self - - partitions: MutableSequence[gf_query.Cursor] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gf_query.Cursor, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class WriteRequest(proto.Message): - r"""The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an existing one from - a token. - - When creating a new stream, the server replies with a response - containing only an ID and a token, to use in the next request. - - When resuming a stream, the server first streams any responses later - than the given token, then a response containing only an up-to-date - token, to use in the next request. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. This is - only required in the first message. - stream_id (str): - The ID of the write stream to resume. - This may only be set in the first message. When - left empty, a new write stream will be created. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - This must be empty on the first request. - This may be empty on the last request. - This must not be empty on all other requests. - stream_token (bytes): - A stream token that was previously sent by the server. - - The client should set this field to the token from the most - recent [WriteResponse][google.firestore.v1.WriteResponse] it - has received. This acknowledges that the client has received - responses up to this token. After sending this token, - earlier tokens may not be used anymore. - - The server may close the stream if there are too many - unacknowledged responses. - - Leave this field unset when creating a new stream. To resume - a stream at a specific point, set this field and the - ``stream_id`` field. 
- - Leave this field unset when creating a new stream. - labels (MutableMapping[str, str]): - Labels associated with this write request. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - stream_id: str = proto.Field( - proto.STRING, - number=2, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=write.Write, - ) - stream_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class WriteResponse(proto.Message): - r"""The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - Attributes: - stream_id (str): - The ID of the stream. - Only set on the first message, when a new stream - was created. - stream_token (bytes): - A token that represents the position of this - response in the stream. This can be used by a - client to resume the stream at this point. - - This field is always set. - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - commit_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the commit occurred. Any read with an - equal or greater ``read_time`` is guaranteed to see the - effects of the write. - """ - - stream_id: str = proto.Field( - proto.STRING, - number=1, - ) - stream_token: bytes = proto.Field( - proto.BYTES, - number=2, - ) - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=write.WriteResult, - ) - commit_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ListenRequest(proto.Message): - r"""A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - add_target (google.cloud.firestore_v1.types.Target): - A target to add to this stream. - - This field is a member of `oneof`_ ``target_change``. - remove_target (int): - The ID of a target to remove from this - stream. - - This field is a member of `oneof`_ ``target_change``. - labels (MutableMapping[str, str]): - Labels associated with this target change. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - add_target: 'Target' = proto.Field( - proto.MESSAGE, - number=2, - oneof='target_change', - message='Target', - ) - remove_target: int = proto.Field( - proto.INT32, - number=3, - oneof='target_change', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class ListenResponse(proto.Message): - r"""The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - target_change (google.cloud.firestore_v1.types.TargetChange): - Targets have changed. - - This field is a member of `oneof`_ ``response_type``. - document_change (google.cloud.firestore_v1.types.DocumentChange): - A [Document][google.firestore.v1.Document] has changed. - - This field is a member of `oneof`_ ``response_type``. 
- document_delete (google.cloud.firestore_v1.types.DocumentDelete): - A [Document][google.firestore.v1.Document] has been deleted. - - This field is a member of `oneof`_ ``response_type``. - document_remove (google.cloud.firestore_v1.types.DocumentRemove): - A [Document][google.firestore.v1.Document] has been removed - from a target (because it is no longer relevant to that - target). - - This field is a member of `oneof`_ ``response_type``. - filter (google.cloud.firestore_v1.types.ExistenceFilter): - A filter to apply to the set of documents - previously returned for the given target. - - Returned when documents may have been removed - from the given target, but the exact documents - are unknown. - - This field is a member of `oneof`_ ``response_type``. - """ - - target_change: 'TargetChange' = proto.Field( - proto.MESSAGE, - number=2, - oneof='response_type', - message='TargetChange', - ) - document_change: write.DocumentChange = proto.Field( - proto.MESSAGE, - number=3, - oneof='response_type', - message=write.DocumentChange, - ) - document_delete: write.DocumentDelete = proto.Field( - proto.MESSAGE, - number=4, - oneof='response_type', - message=write.DocumentDelete, - ) - document_remove: write.DocumentRemove = proto.Field( - proto.MESSAGE, - number=6, - oneof='response_type', - message=write.DocumentRemove, - ) - filter: write.ExistenceFilter = proto.Field( - proto.MESSAGE, - number=5, - oneof='response_type', - message=write.ExistenceFilter, - ) - - -class Target(proto.Message): - r"""A specification of a set of documents to listen to. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query (google.cloud.firestore_v1.types.Target.QueryTarget): - A target specified by a query. - - This field is a member of `oneof`_ ``target_type``. - documents (google.cloud.firestore_v1.types.Target.DocumentsTarget): - A target specified by a set of document - names. - - This field is a member of `oneof`_ ``target_type``. - resume_token (bytes): - A resume token from a prior - [TargetChange][google.firestore.v1.TargetChange] for an - identical target. - - Using a resume token with a different target is unsupported - and may fail. - - This field is a member of `oneof`_ ``resume_type``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Start listening after a specific ``read_time``. - - The client must know the state of matching documents at this - time. - - This field is a member of `oneof`_ ``resume_type``. - target_id (int): - The target ID that identifies the target on the stream. Must - be a positive number and non-zero. - - If ``target_id`` is 0 (or unspecified), the server will - assign an ID for this target and return that in a - ``TargetChange::ADD`` event. Once a target with - ``target_id=0`` is added, all subsequent targets must also - have ``target_id=0``. If an ``AddTarget`` request with - ``target_id != 0`` is sent to the server after a target with - ``target_id=0`` is added, the server will immediately send a - response with a ``TargetChange::Remove`` event. - - Note that if the client sends multiple ``AddTarget`` - requests without an ID, the order of IDs returned in - ``TargetChage.target_ids`` are undefined. Therefore, clients - should provide a target ID instead of relying on the server - to assign one. - - If ``target_id`` is non-zero, there must not be an existing - active target on this stream with the same ID. - once (bool): - If the target should be removed once it is - current and consistent. 
- expected_count (google.protobuf.wrappers_pb2.Int32Value): - The number of documents that last matched the query at the - resume token or read time. - - This value is only relevant when a ``resume_type`` is - provided. This value being present and greater than zero - signals that the client wants - ``ExistenceFilter.unchanged_names`` to be included in the - response. - """ - - class DocumentsTarget(proto.Message): - r"""A target specified by a set of documents names. - - Attributes: - documents (MutableSequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - """ - - documents: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class QueryTarget(proto.Message): - r"""A target specified by a query. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof='query_type', - message=gf_query.StructuredQuery, - ) - - query: QueryTarget = proto.Field( - proto.MESSAGE, - number=2, - oneof='target_type', - message=QueryTarget, - ) - documents: DocumentsTarget = proto.Field( - proto.MESSAGE, - number=3, - oneof='target_type', - message=DocumentsTarget, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - oneof='resume_type', - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - oneof='resume_type', - message=timestamp_pb2.Timestamp, - ) - target_id: int = proto.Field( - proto.INT32, - number=5, - ) - once: bool = proto.Field( - proto.BOOL, - number=6, - ) - expected_count: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=12, - message=wrappers_pb2.Int32Value, - ) - - -class TargetChange(proto.Message): - r"""Targets being watched have changed. - - Attributes: - target_change_type (google.cloud.firestore_v1.types.TargetChange.TargetChangeType): - The type of change that occurred. - target_ids (MutableSequence[int]): - The target IDs of targets that have changed. - - If empty, the change applies to all targets. - - The order of the target IDs is not defined. - cause (google.rpc.status_pb2.Status): - The error that resulted in this change, if - applicable. - resume_token (bytes): - A token that can be used to resume the stream for the given - ``target_ids``, or all targets if ``target_ids`` is empty. - - Not set on every target change. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The consistent ``read_time`` for the given ``target_ids`` - (omitted when the target_ids are not at a consistent - snapshot). - - The stream is guaranteed to send a ``read_time`` with - ``target_ids`` empty whenever the entire stream reaches a - new consistent snapshot. 
ADD, CURRENT, and RESET messages - are guaranteed to (eventually) result in a new consistent - snapshot (while NO_CHANGE and REMOVE messages are not). - - For a given stream, ``read_time`` is guaranteed to be - monotonically increasing. - """ - class TargetChangeType(proto.Enum): - r"""The type of change. - - Values: - NO_CHANGE (0): - No change has occurred. Used only to send an updated - ``resume_token``. - ADD (1): - The targets have been added. - REMOVE (2): - The targets have been removed. - CURRENT (3): - The targets reflect all changes committed before the targets - were added to the stream. - - This will be sent after or with a ``read_time`` that is - greater than or equal to the time at which the targets were - added. - - Listeners can wait for this change if read-after-write - semantics are desired. - RESET (4): - The targets have been reset, and a new initial state for the - targets will be returned in subsequent changes. - - After the initial state is complete, ``CURRENT`` will be - returned even if the target was previously indicated to be - ``CURRENT``. - """ - NO_CHANGE = 0 - ADD = 1 - REMOVE = 2 - CURRENT = 3 - RESET = 4 - - target_change_type: TargetChangeType = proto.Field( - proto.ENUM, - number=1, - enum=TargetChangeType, - ) - target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - cause: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=3, - message=status_pb2.Status, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - - -class ListCollectionIdsRequest(proto.Message): - r"""The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent document. 
In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - page_size (int): - The maximum number of results to return. - page_token (str): - A page token. Must be a value from - [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='consistency_selector', - message=timestamp_pb2.Timestamp, - ) - - -class ListCollectionIdsResponse(proto.Message): - r"""The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Attributes: - collection_ids (MutableSequence[str]): - The collection ids. - next_page_token (str): - A page token that may be used to continue the - list. - """ - - @property - def raw_page(self): - return self - - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class BatchWriteRequest(proto.Message): - r"""The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. 
- - Method does not apply writes atomically and does - not guarantee ordering. Each write succeeds or - fails independently. You cannot write to the - same document more than once per request. - labels (MutableMapping[str, str]): - Labels associated with this batch write. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=write.Write, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class BatchWriteResponse(proto.Message): - r"""The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - Attributes: - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - status (MutableSequence[google.rpc.status_pb2.Status]): - The status of applying the writes. - - This i-th write status corresponds to the i-th - write in the request. - """ - - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=write.WriteResult, - ) - status: MutableSequence[status_pb2.Status] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py deleted file mode 100644 index 9f3f4615bb..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query.py +++ /dev/null @@ -1,875 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'StructuredQuery', - 'StructuredAggregationQuery', - 'Cursor', - }, -) - - -class StructuredQuery(proto.Message): - r"""A Firestore query. - - The query stages are executed in the following order: - - 1. from - 2. where - 3. select - 4. order_by + start_at + end_at - 5. offset - 6. limit - - Attributes: - select (google.cloud.firestore_v1.types.StructuredQuery.Projection): - Optional sub-set of the fields to return. - - This acts as a - [DocumentMask][google.firestore.v1.DocumentMask] over the - documents returned from a query. When not set, assumes that - the caller wants all fields returned. - from_ (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.CollectionSelector]): - The collections to query. - where (google.cloud.firestore_v1.types.StructuredQuery.Filter): - The filter to apply. - order_by (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Order]): - The order to apply to the query results. - - Firestore allows callers to provide a full ordering, a - partial ordering, or no ordering at all. In all cases, - Firestore guarantees a stable ordering through the following - rules: - - - The ``order_by`` is required to reference all fields used - with an inequality filter. 
- - All fields that are required to be in the ``order_by`` - but are not already present are appended in - lexicographical ordering of the field name. - - If an order on ``__name__`` is not specified, it is - appended by default. - - Fields are appended with the same sort direction as the last - order specified, or 'ASCENDING' if no order was specified. - For example: - - - ``ORDER BY a`` becomes ``ORDER BY a ASC, __name__ ASC`` - - ``ORDER BY a DESC`` becomes - ``ORDER BY a DESC, __name__ DESC`` - - ``WHERE a > 1`` becomes - ``WHERE a > 1 ORDER BY a ASC, __name__ ASC`` - - ``WHERE __name__ > ... AND a > 1`` becomes - ``WHERE __name__ > ... AND a > 1 ORDER BY a ASC, __name__ ASC`` - start_at (google.cloud.firestore_v1.types.Cursor): - A potential prefix of a position in the result set to start - the query at. - - The ordering of the result set is based on the ``ORDER BY`` - clause of the original query. - - :: - - SELECT * FROM k WHERE a = 1 AND b > 2 ORDER BY b ASC, __name__ ASC; - - This query's results are ordered by - ``(b ASC, __name__ ASC)``. - - Cursors can reference either the full ordering or a prefix - of the location, though it cannot reference more fields than - what are in the provided ``ORDER BY``. - - Continuing off the example above, attaching the following - start cursors will have varying impact: - - - ``START BEFORE (2, /k/123)``: start the query right - before ``a = 1 AND b > 2 AND __name__ > /k/123``. - - ``START AFTER (10)``: start the query right after - ``a = 1 AND b > 10``. - - Unlike ``OFFSET`` which requires scanning over the first N - results to skip, a start cursor allows the query to begin at - a logical position. This position is not required to match - an actual result, it will scan forward from this position to - find the next document. - - Requires: - - - The number of values cannot be greater than the number of - fields specified in the ``ORDER BY`` clause. 
- end_at (google.cloud.firestore_v1.types.Cursor): - A potential prefix of a position in the result set to end - the query at. - - This is similar to ``START_AT`` but with it controlling the - end position rather than the start position. - - Requires: - - - The number of values cannot be greater than the number of - fields specified in the ``ORDER BY`` clause. - offset (int): - The number of documents to skip before returning the first - result. - - This applies after the constraints specified by the - ``WHERE``, ``START AT``, & ``END AT`` but before the - ``LIMIT`` clause. - - Requires: - - - The value must be greater than or equal to zero if - specified. - limit (google.protobuf.wrappers_pb2.Int32Value): - The maximum number of results to return. - - Applies after all other constraints. - - Requires: - - - The value must be greater than or equal to zero if - specified. - find_nearest (google.cloud.firestore_v1.types.StructuredQuery.FindNearest): - Optional. A potential Nearest Neighbors - Search. - Applies after all other filters and ordering. - - Finds the closest vector embeddings to the given - query vector. - """ - class Direction(proto.Enum): - r"""A sort direction. - - Values: - DIRECTION_UNSPECIFIED (0): - Unspecified. - ASCENDING (1): - Ascending. - DESCENDING (2): - Descending. - """ - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class CollectionSelector(proto.Message): - r"""A selection of a collection, such as ``messages as m1``. - - Attributes: - collection_id (str): - The collection ID. - When set, selects only collections with this ID. - all_descendants (bool): - When false, selects only collections that are immediate - children of the ``parent`` specified in the containing - ``RunQueryRequest``. When true, selects all descendant - collections. 
- """ - - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - all_descendants: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class Filter(proto.Message): - r"""A filter. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - composite_filter (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter): - A composite filter. - - This field is a member of `oneof`_ ``filter_type``. - field_filter (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter): - A filter on a document field. - - This field is a member of `oneof`_ ``filter_type``. - unary_filter (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter): - A filter that takes exactly one argument. - - This field is a member of `oneof`_ ``filter_type``. - """ - - composite_filter: 'StructuredQuery.CompositeFilter' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter_type', - message='StructuredQuery.CompositeFilter', - ) - field_filter: 'StructuredQuery.FieldFilter' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter_type', - message='StructuredQuery.FieldFilter', - ) - unary_filter: 'StructuredQuery.UnaryFilter' = proto.Field( - proto.MESSAGE, - number=3, - oneof='filter_type', - message='StructuredQuery.UnaryFilter', - ) - - class CompositeFilter(proto.Message): - r"""A filter that merges multiple other filters using the given - operator. - - Attributes: - op (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter.Operator): - The operator for combining multiple filters. - filters (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Filter]): - The list of filters to combine. - - Requires: - - - At least one filter is present. 
- """ - class Operator(proto.Enum): - r"""A composite filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - AND (1): - Documents are required to satisfy all of the - combined filters. - OR (2): - Documents are required to satisfy at least - one of the combined filters. - """ - OPERATOR_UNSPECIFIED = 0 - AND = 1 - OR = 2 - - op: 'StructuredQuery.CompositeFilter.Operator' = proto.Field( - proto.ENUM, - number=1, - enum='StructuredQuery.CompositeFilter.Operator', - ) - filters: MutableSequence['StructuredQuery.Filter'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='StructuredQuery.Filter', - ) - - class FieldFilter(proto.Message): - r"""A filter on a specific field. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to filter by. - op (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter.Operator): - The operator to filter by. - value (google.cloud.firestore_v1.types.Value): - The value to compare to. - """ - class Operator(proto.Enum): - r"""A field filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - LESS_THAN (1): - The given ``field`` is less than the given ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - LESS_THAN_OR_EQUAL (2): - The given ``field`` is less than or equal to the given - ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - GREATER_THAN (3): - The given ``field`` is greater than the given ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - GREATER_THAN_OR_EQUAL (4): - The given ``field`` is greater than or equal to the given - ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - EQUAL (5): - The given ``field`` is equal to the given ``value``. - NOT_EQUAL (6): - The given ``field`` is not equal to the given ``value``. 
- - Requires: - - - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - ARRAY_CONTAINS (7): - The given ``field`` is an array that contains the given - ``value``. - IN (8): - The given ``field`` is equal to at least one value in the - given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No ``NOT_IN`` filters in the same query. - ARRAY_CONTAINS_ANY (9): - The given ``field`` is an array that contains any of the - values in the given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No other ``ARRAY_CONTAINS_ANY`` filters within the same - disjunction. - - No ``NOT_IN`` filters in the same query. - NOT_IN (10): - The value of the ``field`` is not in the given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue`` with at most - 10 values. - - No other ``OR``, ``IN``, ``ARRAY_CONTAINS_ANY``, - ``NOT_IN``, ``NOT_EQUAL``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - """ - OPERATOR_UNSPECIFIED = 0 - LESS_THAN = 1 - LESS_THAN_OR_EQUAL = 2 - GREATER_THAN = 3 - GREATER_THAN_OR_EQUAL = 4 - EQUAL = 5 - NOT_EQUAL = 6 - ARRAY_CONTAINS = 7 - IN = 8 - ARRAY_CONTAINS_ANY = 9 - NOT_IN = 10 - - field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=1, - message='StructuredQuery.FieldReference', - ) - op: 'StructuredQuery.FieldFilter.Operator' = proto.Field( - proto.ENUM, - number=2, - enum='StructuredQuery.FieldFilter.Operator', - ) - value: document.Value = proto.Field( - proto.MESSAGE, - number=3, - message=document.Value, - ) - - class UnaryFilter(proto.Message): - r"""A filter with a single operand. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - op (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter.Operator): - The unary operator to apply. - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to which to apply the operator. - - This field is a member of `oneof`_ ``operand_type``. - """ - class Operator(proto.Enum): - r"""A unary operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - IS_NAN (2): - The given ``field`` is equal to ``NaN``. - IS_NULL (3): - The given ``field`` is equal to ``NULL``. - IS_NOT_NAN (4): - The given ``field`` is not equal to ``NaN``. - - Requires: - - - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - IS_NOT_NULL (5): - The given ``field`` is not equal to ``NULL``. - - Requires: - - - A single ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - """ - OPERATOR_UNSPECIFIED = 0 - IS_NAN = 2 - IS_NULL = 3 - IS_NOT_NAN = 4 - IS_NOT_NULL = 5 - - op: 'StructuredQuery.UnaryFilter.Operator' = proto.Field( - proto.ENUM, - number=1, - enum='StructuredQuery.UnaryFilter.Operator', - ) - field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=2, - oneof='operand_type', - message='StructuredQuery.FieldReference', - ) - - class Order(proto.Message): - r"""An order on a field. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to order by. - direction (google.cloud.firestore_v1.types.StructuredQuery.Direction): - The direction to order by. Defaults to ``ASCENDING``. 
- """ - - field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=1, - message='StructuredQuery.FieldReference', - ) - direction: 'StructuredQuery.Direction' = proto.Field( - proto.ENUM, - number=2, - enum='StructuredQuery.Direction', - ) - - class FieldReference(proto.Message): - r"""A reference to a field in a document, ex: ``stats.operations``. - - Attributes: - field_path (str): - A reference to a field in a document. - - Requires: - - - MUST be a dot-delimited (``.``) string of segments, where - each segment conforms to [document field - name][google.firestore.v1.Document.fields] limitations. - """ - - field_path: str = proto.Field( - proto.STRING, - number=2, - ) - - class Projection(proto.Message): - r"""The projection of document's fields to return. - - Attributes: - fields (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.FieldReference]): - The fields to return. - - If empty, all fields are returned. To only return the name - of the document, use ``['__name__']``. - """ - - fields: MutableSequence['StructuredQuery.FieldReference'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='StructuredQuery.FieldReference', - ) - - class FindNearest(proto.Message): - r"""Nearest Neighbors search config. - - Attributes: - vector_field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - Required. An indexed vector field to search upon. Only - documents which contain vectors whose dimensionality match - the query_vector can be returned. - query_vector (google.cloud.firestore_v1.types.Value): - Required. The query vector that we are - searching on. Must be a vector of no more than - 2048 dimensions. - distance_measure (google.cloud.firestore_v1.types.StructuredQuery.FindNearest.DistanceMeasure): - Required. The Distance Measure to use, - required. - limit (google.protobuf.wrappers_pb2.Int32Value): - Required. The number of nearest neighbors to - return. Must be a positive integer of no more - than 1000. 
- """ - class DistanceMeasure(proto.Enum): - r"""The distance measure to use when comparing vectors. - - Values: - DISTANCE_MEASURE_UNSPECIFIED (0): - Should not be set. - EUCLIDEAN (1): - Measures the EUCLIDEAN distance between the vectors. See - `Euclidean `__ - to learn more - COSINE (2): - Compares vectors based on the angle between them, which - allows you to measure similarity that isn't based on the - vectors magnitude. We recommend using DOT_PRODUCT with unit - normalized vectors instead of COSINE distance, which is - mathematically equivalent with better performance. See - `Cosine - Similarity `__ - to learn more. - DOT_PRODUCT (3): - Similar to cosine but is affected by the magnitude of the - vectors. See `Dot - Product `__ to - learn more. - """ - DISTANCE_MEASURE_UNSPECIFIED = 0 - EUCLIDEAN = 1 - COSINE = 2 - DOT_PRODUCT = 3 - - vector_field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=1, - message='StructuredQuery.FieldReference', - ) - query_vector: document.Value = proto.Field( - proto.MESSAGE, - number=2, - message=document.Value, - ) - distance_measure: 'StructuredQuery.FindNearest.DistanceMeasure' = proto.Field( - proto.ENUM, - number=3, - enum='StructuredQuery.FindNearest.DistanceMeasure', - ) - limit: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=4, - message=wrappers_pb2.Int32Value, - ) - - select: Projection = proto.Field( - proto.MESSAGE, - number=1, - message=Projection, - ) - from_: MutableSequence[CollectionSelector] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=CollectionSelector, - ) - where: Filter = proto.Field( - proto.MESSAGE, - number=3, - message=Filter, - ) - order_by: MutableSequence[Order] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Order, - ) - start_at: 'Cursor' = proto.Field( - proto.MESSAGE, - number=7, - message='Cursor', - ) - end_at: 'Cursor' = proto.Field( - proto.MESSAGE, - number=8, - message='Cursor', - ) - offset: int = proto.Field( - 
proto.INT32, - number=6, - ) - limit: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers_pb2.Int32Value, - ) - find_nearest: FindNearest = proto.Field( - proto.MESSAGE, - number=9, - message=FindNearest, - ) - - -class StructuredAggregationQuery(proto.Message): - r"""Firestore query for running an aggregation over a - [StructuredQuery][google.firestore.v1.StructuredQuery]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - Nested structured query. - - This field is a member of `oneof`_ ``query_type``. - aggregations (MutableSequence[google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation]): - Optional. Series of aggregations to apply over the results - of the ``structured_query``. - - Requires: - - - A minimum of one and maximum of five aggregations per - query. - """ - - class Aggregation(proto.Message): - r"""Defines an aggregation that produces a single result. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - count (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Count): - Count aggregator. - - This field is a member of `oneof`_ ``operator``. - sum (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Sum): - Sum aggregator. - - This field is a member of `oneof`_ ``operator``. - avg (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Avg): - Average aggregator. - - This field is a member of `oneof`_ ``operator``. - alias (str): - Optional. Optional name of the field to store the result of - the aggregation into. 
- - If not provided, Firestore will pick a default name - following the format ``field_``. For - example: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2), - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) - OVER ( - ... - ); - - becomes: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2) AS field_1, - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) AS field_2 - OVER ( - ... - ); - - Requires: - - - Must be unique across all aggregation aliases. - - Conform to [document field - name][google.firestore.v1.Document.fields] limitations. - """ - - class Count(proto.Message): - r"""Count of documents that match the query. - - The ``COUNT(*)`` aggregation function operates on the entire - document so it does not require a field reference. - - Attributes: - up_to (google.protobuf.wrappers_pb2.Int64Value): - Optional. Optional constraint on the maximum number of - documents to count. - - This provides a way to set an upper bound on the number of - documents to scan, limiting latency, and cost. - - Unspecified is interpreted as no bound. - - High-Level Example: - - :: - - AGGREGATE COUNT_UP_TO(1000) OVER ( SELECT * FROM k ); - - Requires: - - - Must be greater than zero when present. - """ - - up_to: wrappers_pb2.Int64Value = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.Int64Value, - ) - - class Sum(proto.Message): - r"""Sum of the values of the requested field. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns 0. - - - Returns a 64-bit integer if all aggregated numbers are integers - and the sum result does not overflow. Otherwise, the result is - returned as a double. 
Note that even if all the aggregated values - are integers, the result is returned as a double if it cannot fit - within a 64-bit signed integer. When this occurs, the returned - value will lose precision. - - - When underflow occurs, floating-point aggregation is - non-deterministic. This means that running the same query - repeatedly without any changes to the underlying values could - produce slightly different results each time. In those cases, - values should be stored as integers over floating-point numbers. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to aggregate on. - """ - - field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=1, - message='StructuredQuery.FieldReference', - ) - - class Avg(proto.Message): - r"""Average of the values of the requested field. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns ``NULL``. - - - Always returns the result as a double. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to aggregate on. 
- """ - - field: 'StructuredQuery.FieldReference' = proto.Field( - proto.MESSAGE, - number=1, - message='StructuredQuery.FieldReference', - ) - - count: 'StructuredAggregationQuery.Aggregation.Count' = proto.Field( - proto.MESSAGE, - number=1, - oneof='operator', - message='StructuredAggregationQuery.Aggregation.Count', - ) - sum: 'StructuredAggregationQuery.Aggregation.Sum' = proto.Field( - proto.MESSAGE, - number=2, - oneof='operator', - message='StructuredAggregationQuery.Aggregation.Sum', - ) - avg: 'StructuredAggregationQuery.Aggregation.Avg' = proto.Field( - proto.MESSAGE, - number=3, - oneof='operator', - message='StructuredAggregationQuery.Aggregation.Avg', - ) - alias: str = proto.Field( - proto.STRING, - number=7, - ) - - structured_query: 'StructuredQuery' = proto.Field( - proto.MESSAGE, - number=1, - oneof='query_type', - message='StructuredQuery', - ) - aggregations: MutableSequence[Aggregation] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Aggregation, - ) - - -class Cursor(proto.Message): - r"""A position in a query result set. - - Attributes: - values (MutableSequence[google.cloud.firestore_v1.types.Value]): - The values that represent a position, in the - order they appear in the order by clause of a - query. - - Can contain fewer values than specified in the - order by clause. - before (bool): - If the position is just before or just after - the given values, relative to the sort order - defined by the query. 
- """ - - values: MutableSequence[document.Value] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=document.Value, - ) - before: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py deleted file mode 100644 index 201815ecf6..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/query_profile.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'ExplainOptions', - 'ExplainMetrics', - 'PlanSummary', - 'ExecutionStats', - }, -) - - -class ExplainOptions(proto.Message): - r"""Explain options for the query. - - Attributes: - analyze (bool): - Optional. Whether to execute this query. - - When false (the default), the query will be - planned, returning only metrics from the - planning stages. 
- - When true, the query will be planned and - executed, returning the full query results along - with both planning and execution stage metrics. - """ - - analyze: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ExplainMetrics(proto.Message): - r"""Explain metrics for the query. - - Attributes: - plan_summary (google.cloud.firestore_v1.types.PlanSummary): - Planning phase information for the query. - execution_stats (google.cloud.firestore_v1.types.ExecutionStats): - Aggregated stats from the execution of the query. Only - present when - [ExplainOptions.analyze][google.firestore.v1.ExplainOptions.analyze] - is set to true. - """ - - plan_summary: 'PlanSummary' = proto.Field( - proto.MESSAGE, - number=1, - message='PlanSummary', - ) - execution_stats: 'ExecutionStats' = proto.Field( - proto.MESSAGE, - number=2, - message='ExecutionStats', - ) - - -class PlanSummary(proto.Message): - r"""Planning phase information for the query. - - Attributes: - indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): - The indexes selected for the query. For example: [ - {"query_scope": "Collection", "properties": "(foo ASC, - **name** ASC)"}, {"query_scope": "Collection", "properties": - "(bar ASC, **name** ASC)"} ] - """ - - indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - - -class ExecutionStats(proto.Message): - r"""Execution statistics for the query. - - Attributes: - results_returned (int): - Total number of results returned, including - documents, projections, aggregation results, - keys. - execution_duration (google.protobuf.duration_pb2.Duration): - Total time to execute the query in the - backend. - read_operations (int): - Total billable read operations. - debug_stats (google.protobuf.struct_pb2.Struct): - Debugging statistics from the execution of the query. Note - that the debugging stats are subject to change as Firestore - evolves. 
It could include: { "indexes_entries_scanned": - "1000", "documents_scanned": "20", "billing_details" : { - "documents_billable": "20", "index_entries_billable": - "1000", "min_query_cost": "0" } } - """ - - results_returned: int = proto.Field( - proto.INT64, - number=1, - ) - execution_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - read_operations: int = proto.Field( - proto.INT64, - number=4, - ) - debug_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=5, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py b/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py deleted file mode 100644 index 184c44be10..0000000000 --- a/owl-bot-staging/firestore/v1/google/cloud/firestore_v1/types/write.py +++ /dev/null @@ -1,509 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import bloom_filter -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document as gf_document -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.v1', - manifest={ - 'Write', - 'DocumentTransform', - 'WriteResult', - 'DocumentChange', - 'DocumentDelete', - 'DocumentRemove', - 'ExistenceFilter', - }, -) - - -class Write(proto.Message): - r"""A write on a document. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - update (google.cloud.firestore_v1.types.Document): - A document to write. - - This field is a member of `oneof`_ ``operation``. - delete (str): - A document name to delete. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``operation``. - transform (google.cloud.firestore_v1.types.DocumentTransform): - Applies a transformation to a document. - - This field is a member of `oneof`_ ``operation``. - update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update in this write. - - This field can be set only when the operation is ``update``. - If the mask is not set for an ``update`` and the document - exists, any existing data will be overwritten. If the mask - is set and the document on the server has fields not covered - by the mask, they are left unchanged. Fields referenced in - the mask, but not present in the input document, are deleted - from the document on the server. 
The field paths in this - mask must not contain a reserved field name. - update_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): - The transforms to perform after update. - - This field can be set only when the operation is ``update``. - If present, this write is equivalent to performing - ``update`` and ``transform`` to the same document atomically - and in order. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - - The write will fail if this is set and not met - by the target document. - """ - - update: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - oneof='operation', - message=gf_document.Document, - ) - delete: str = proto.Field( - proto.STRING, - number=2, - oneof='operation', - ) - transform: 'DocumentTransform' = proto.Field( - proto.MESSAGE, - number=6, - oneof='operation', - message='DocumentTransform', - ) - update_mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - update_transforms: MutableSequence['DocumentTransform.FieldTransform'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='DocumentTransform.FieldTransform', - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=4, - message=common.Precondition, - ) - - -class DocumentTransform(proto.Message): - r"""A transformation of a document. - - Attributes: - document (str): - The name of the document to transform. - field_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): - The list of transformations to apply to the - fields of the document, in order. - This must not be empty. - """ - - class FieldTransform(proto.Message): - r"""A transformation of a field of the document. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field_path (str): - The path of the field. See - [Document.fields][google.firestore.v1.Document.fields] for - the field path syntax reference. - set_to_server_value (google.cloud.firestore_v1.types.DocumentTransform.FieldTransform.ServerValue): - Sets the field to the given server value. - - This field is a member of `oneof`_ ``transform_type``. - increment (google.cloud.firestore_v1.types.Value): - Adds the given value to the field's current - value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If either - of the given value or the current field value - are doubles, both values will be interpreted as - doubles. Double arithmetic and representation of - double values follow IEEE 754 semantics. If - there is positive/negative integer overflow, the - field is resolved to the largest magnitude - positive/negative integer. - - This field is a member of `oneof`_ ``transform_type``. - maximum (google.cloud.firestore_v1.types.Value): - Sets the field to the maximum of its current - value and the given value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If a - maximum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the larger operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The maximum of a zero stored value and - zero input value is always the stored value. 
- The maximum of any numeric value x and NaN is - NaN. - - This field is a member of `oneof`_ ``transform_type``. - minimum (google.cloud.firestore_v1.types.Value): - Sets the field to the minimum of its current - value and the given value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the input value. If a - minimum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the smaller operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The minimum of a zero stored value and - zero input value is always the stored value. - The minimum of any numeric value x and NaN is - NaN. - - This field is a member of `oneof`_ ``transform_type``. - append_missing_elements (google.cloud.firestore_v1.types.ArrayValue): - Append the given elements in order if they are not already - present in the current field value. If the field is not an - array, or if the field does not yet exist, it is first set - to the empty array. - - Equivalent numbers of different types (e.g. 3L and 3.0) are - considered equal when checking if a value is missing. NaN is - equal to NaN, and Null is equal to Null. If the input - contains multiple equivalent values, only the first will be - considered. - - The corresponding transform_result will be the null value. - - This field is a member of `oneof`_ ``transform_type``. - remove_all_from_array (google.cloud.firestore_v1.types.ArrayValue): - Remove all of the given elements from the array in the - field. If the field is not an array, or if the field does - not yet exist, it is set to the empty array. - - Equivalent numbers of the different types (e.g. 3L and 3.0) - are considered equal when deciding whether an element should - be removed. 
NaN is equal to NaN, and Null is equal to Null. - This will remove all equivalent values if there are - duplicates. - - The corresponding transform_result will be the null value. - - This field is a member of `oneof`_ ``transform_type``. - """ - class ServerValue(proto.Enum): - r"""A value that is calculated by the server. - - Values: - SERVER_VALUE_UNSPECIFIED (0): - Unspecified. This value must not be used. - REQUEST_TIME (1): - The time at which the server processed the - request, with millisecond precision. If used on - multiple fields (same or different documents) in - a transaction, all the fields will get the same - server timestamp. - """ - SERVER_VALUE_UNSPECIFIED = 0 - REQUEST_TIME = 1 - - field_path: str = proto.Field( - proto.STRING, - number=1, - ) - set_to_server_value: 'DocumentTransform.FieldTransform.ServerValue' = proto.Field( - proto.ENUM, - number=2, - oneof='transform_type', - enum='DocumentTransform.FieldTransform.ServerValue', - ) - increment: gf_document.Value = proto.Field( - proto.MESSAGE, - number=3, - oneof='transform_type', - message=gf_document.Value, - ) - maximum: gf_document.Value = proto.Field( - proto.MESSAGE, - number=4, - oneof='transform_type', - message=gf_document.Value, - ) - minimum: gf_document.Value = proto.Field( - proto.MESSAGE, - number=5, - oneof='transform_type', - message=gf_document.Value, - ) - append_missing_elements: gf_document.ArrayValue = proto.Field( - proto.MESSAGE, - number=6, - oneof='transform_type', - message=gf_document.ArrayValue, - ) - remove_all_from_array: gf_document.ArrayValue = proto.Field( - proto.MESSAGE, - number=7, - oneof='transform_type', - message=gf_document.ArrayValue, - ) - - document: str = proto.Field( - proto.STRING, - number=1, - ) - field_transforms: MutableSequence[FieldTransform] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=FieldTransform, - ) - - -class WriteResult(proto.Message): - r"""The result of applying a write. 
- - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - The last update time of the document after applying the - write. Not set after a ``delete``. - - If the write did not actually change the document, this will - be the previous update_time. - transform_results (MutableSequence[google.cloud.firestore_v1.types.Value]): - The results of applying each - [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], - in the same order. - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - transform_results: MutableSequence[gf_document.Value] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gf_document.Value, - ) - - -class DocumentChange(proto.Message): - r"""A [Document][google.firestore.v1.Document] has changed. - - May be the result of multiple [writes][google.firestore.v1.Write], - including deletes, that ultimately resulted in a new value for the - [Document][google.firestore.v1.Document]. - - Multiple [DocumentChange][google.firestore.v1.DocumentChange] - messages may be returned for the same logical change, if multiple - targets are affected. - - Attributes: - document (google.cloud.firestore_v1.types.Document): - The new state of the - [Document][google.firestore.v1.Document]. - - If ``mask`` is set, contains only fields that were updated - or added. - target_ids (MutableSequence[int]): - A set of target IDs of targets that match - this document. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that no - longer match this document. 
- """ - - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=5, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=6, - ) - - -class DocumentDelete(proto.Message): - r"""A [Document][google.firestore.v1.Document] has been deleted. - - May be the result of multiple [writes][google.firestore.v1.Write], - including updates, the last of which deleted the - [Document][google.firestore.v1.Document]. - - Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] - messages may be returned for the same logical delete, if multiple - targets are affected. - - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1.Document] that was deleted. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that - previously matched this entity. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read timestamp at which the delete was observed. - - Greater or equal to the ``commit_time`` of the delete. - """ - - document: str = proto.Field( - proto.STRING, - number=1, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=6, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class DocumentRemove(proto.Message): - r"""A [Document][google.firestore.v1.Document] has been removed from the - view of the targets. - - Sent if the document is no longer relevant to a target and is out of - view. Can be sent instead of a DocumentDelete or a DocumentChange if - the server can not send the new value of the document. - - Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] - messages may be returned for the same logical write or delete, if - multiple targets are affected. 
- - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1.Document] that has gone out - of view. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that - previously matched this document. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read timestamp at which the remove was observed. - - Greater or equal to the ``commit_time`` of the - change/delete/remove. - """ - - document: str = proto.Field( - proto.STRING, - number=1, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ExistenceFilter(proto.Message): - r"""A digest of all the documents that match a given target. - - Attributes: - target_id (int): - The target ID to which this filter applies. - count (int): - The total count of documents that match - [target_id][google.firestore.v1.ExistenceFilter.target_id]. - - If different from the count of documents in the client that - match, the client must manually determine which documents no - longer match the target. - - The client can use the ``unchanged_names`` bloom filter to - assist with this determination by testing ALL the document - names against the filter; if the document name is NOT in the - filter, it means the document no longer matches the target. - unchanged_names (google.cloud.firestore_v1.types.BloomFilter): - A bloom filter that, despite its name, contains the UTF-8 - byte encodings of the resource names of ALL the documents - that match - [target_id][google.firestore.v1.ExistenceFilter.target_id], - in the form - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This bloom filter may be omitted at the server's discretion, - such as if it is deemed that the client will not make use of - it or if it is too computationally expensive to calculate or - transmit. 
Clients must gracefully handle this field being - absent by falling back to the logic used before this field - existed; that is, re-add the target without a resume token - to figure out which documents in the client's cache are out - of sync. - """ - - target_id: int = proto.Field( - proto.INT32, - number=1, - ) - count: int = proto.Field( - proto.INT32, - number=2, - ) - unchanged_names: bloom_filter.BloomFilter = proto.Field( - proto.MESSAGE, - number=3, - message=bloom_filter.BloomFilter, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore/v1/mypy.ini b/owl-bot-staging/firestore/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/firestore/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/firestore/v1/noxfile.py b/owl-bot-staging/firestore/v1/noxfile.py deleted file mode 100644 index 0633d4d81f..0000000000 --- a/owl-bot-staging/firestore/v1/noxfile.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-firestore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/firestore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - -@nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): - """Run the unit test suite against pre-release versions of dependencies.""" - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. 
- with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/firestore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py deleted file mode 100644 index ca00b0567b..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchGetDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BatchGetDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. 
-# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = await client.batch_get_documents(request=request) - - # Handle the response - async for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_BatchGetDocuments_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py deleted file mode 100644 index 12ad8bda43..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_get_documents_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for BatchGetDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BatchGetDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = client.batch_get_documents(request=request) - - # Handle the response - for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_BatchGetDocuments_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py deleted file mode 100644 index 092f2ec2a2..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchWrite -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BatchWrite_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = await client.batch_write(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_BatchWrite_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py deleted file mode 100644 index d943de176e..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_batch_write_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchWrite -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BatchWrite_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = client.batch_write(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_BatchWrite_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py deleted file mode 100644 index 8f9a2d8378..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BeginTransaction -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BeginTransaction_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_BeginTransaction_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py deleted file mode 100644 index f036f63c25..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_begin_transaction_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance 
with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BeginTransaction -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_BeginTransaction_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_BeginTransaction_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py deleted file mode 100644 index 7403705ee4..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Commit -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Commit_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_commit(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_Commit_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py deleted file mode 100644 index 059cf23bca..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_commit_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Commit -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Commit_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_commit(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_Commit_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py deleted file mode 100644 index 6ff6596d30..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_CreateDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = await client.create_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_CreateDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py deleted file mode 100644 index d26f82921b..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_create_document_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_CreateDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = client.create_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_CreateDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py deleted file mode 100644 index f7e822edb6..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_DeleteDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - await client.delete_document(request=request) - - -# [END firestore_v1_generated_Firestore_DeleteDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py deleted file mode 100644 index b51f132adc..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_delete_document_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_DeleteDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - client.delete_document(request=request) - - -# [END firestore_v1_generated_Firestore_DeleteDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py deleted file mode 100644 index 8436177089..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_GetDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = await client.get_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_GetDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py deleted file mode 100644 index d275afa9e0..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_get_document_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_GetDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = client.get_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_GetDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py deleted file mode 100644 index a5bb34a6c5..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCollectionIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_ListCollectionIds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_ListCollectionIds_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py deleted file mode 100644 index 6afc4ecf08..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_collection_ids_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCollectionIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_ListCollectionIds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_ListCollectionIds_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py deleted file mode 100644 index d6d73863ae..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_ListDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_ListDocuments_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py deleted file mode 100644 index 36374e65ac..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_list_documents_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_ListDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_ListDocuments_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py deleted file mode 100644 index 8a2aeca34a..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_async.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Listen -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Listen_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_listen(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.listen(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_Listen_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py deleted file mode 100644 index ad95096b8e..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_listen_sync.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Listen -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Listen_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_listen(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.listen(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_Listen_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py deleted file mode 100644 index 252bff079b..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_PartitionQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_PartitionQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py deleted file mode 100644 index 790057a9f6..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_partition_query_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_PartitionQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END firestore_v1_generated_Firestore_PartitionQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py deleted file mode 100644 index b44f38feba..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Rollback_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - await client.rollback(request=request) - - -# [END firestore_v1_generated_Firestore_Rollback_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py deleted file mode 100644 index 8e11439857..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_rollback_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Rollback_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - client.rollback(request=request) - - -# [END firestore_v1_generated_Firestore_Rollback_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py deleted file mode 100644 index 2c82a287ef..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunAggregationQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_RunAggregationQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_aggregation_query(request=request) - - # Handle the response - async for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_RunAggregationQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py deleted file mode 100644 index 1f814a5f48..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_aggregation_query_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunAggregationQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_RunAggregationQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_aggregation_query(request=request) - - # Handle the response - for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_RunAggregationQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py deleted file mode 100644 index ea4d24ad69..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_RunQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_query(request=request) - - # Handle the response - async for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_RunQuery_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py deleted file mode 100644 index bf51ccc402..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_run_query_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_RunQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_query(request=request) - - # Handle the response - for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_RunQuery_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py deleted file mode 100644 index f5ed989a53..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_UpdateDocument_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = await client.update_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_UpdateDocument_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py deleted file mode 100644 index bd8cdc296a..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_update_document_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDocument -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_UpdateDocument_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = client.update_document(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_Firestore_UpdateDocument_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py deleted file mode 100644 index 561098e322..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Write -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Write_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -async def sample_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.write(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_Write_async] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py b/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py deleted file mode 100644 index 1980d0935b..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/firestore_v1_generated_firestore_write_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Write -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore - - -# [START firestore_v1_generated_Firestore_Write_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_v1 - - -def sample_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.write(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - -# [END firestore_v1_generated_Firestore_Write_sync] diff --git a/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json b/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json deleted file mode 100644 index 90a3e4b880..0000000000 --- a/owl-bot-staging/firestore/v1/samples/generated_samples/snippet_metadata_google.firestore.v1.json +++ /dev/null @@ -1,2523 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.firestore.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-firestore", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.batch_get_documents", - "method": { - "fullName": "google.firestore.v1.Firestore.BatchGetDocuments", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BatchGetDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BatchGetDocumentsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]", - "shortName": "batch_get_documents" - }, - "description": "Sample for BatchGetDocuments", - "file": "firestore_v1_generated_firestore_batch_get_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "firestore_v1_generated_Firestore_BatchGetDocuments_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_batch_get_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.batch_get_documents", - "method": { - "fullName": "google.firestore.v1.Firestore.BatchGetDocuments", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BatchGetDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BatchGetDocumentsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]", - "shortName": "batch_get_documents" - }, - "description": "Sample for BatchGetDocuments", - "file": "firestore_v1_generated_firestore_batch_get_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_BatchGetDocuments_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_batch_get_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.batch_write", - "method": { - "fullName": "google.firestore.v1.Firestore.BatchWrite", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BatchWriteRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.BatchWriteResponse", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "firestore_v1_generated_firestore_batch_write_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_BatchWrite_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_batch_write_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.batch_write", - "method": { - 
"fullName": "google.firestore.v1.Firestore.BatchWrite", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BatchWriteRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.BatchWriteResponse", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "firestore_v1_generated_firestore_batch_write_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_BatchWrite_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_batch_write_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.begin_transaction", - "method": { - "fullName": "google.firestore.v1.Firestore.BeginTransaction", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BeginTransactionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - 
{ - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.BeginTransactionResponse", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "firestore_v1_generated_firestore_begin_transaction_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_BeginTransaction_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_begin_transaction_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.begin_transaction", - "method": { - "fullName": "google.firestore.v1.Firestore.BeginTransaction", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.BeginTransactionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.BeginTransactionResponse", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "firestore_v1_generated_firestore_begin_transaction_sync.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_BeginTransaction_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_begin_transaction_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.commit", - "method": { - "fullName": "google.firestore.v1.Firestore.Commit", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.CommitRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "writes", - "type": "MutableSequence[google.cloud.firestore_v1.types.Write]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "firestore_v1_generated_firestore_commit_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Commit_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, 
- { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_commit_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.commit", - "method": { - "fullName": "google.firestore.v1.Firestore.Commit", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.CommitRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "writes", - "type": "MutableSequence[google.cloud.firestore_v1.types.Write]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "firestore_v1_generated_firestore_commit_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Commit_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_commit_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.create_document", - "method": { - "fullName": "google.firestore.v1.Firestore.CreateDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "CreateDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.CreateDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "create_document" - }, - "description": "Sample for CreateDocument", - "file": "firestore_v1_generated_firestore_create_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_CreateDocument_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_create_document_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.create_document", - "method": { - "fullName": "google.firestore.v1.Firestore.CreateDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "CreateDocument" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.firestore_v1.types.CreateDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "create_document" - }, - "description": "Sample for CreateDocument", - "file": "firestore_v1_generated_firestore_create_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_CreateDocument_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_create_document_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.delete_document", - "method": { - "fullName": "google.firestore.v1.Firestore.DeleteDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "DeleteDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.DeleteDocumentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_document" - }, - "description": "Sample for DeleteDocument", - "file": 
"firestore_v1_generated_firestore_delete_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_DeleteDocument_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_delete_document_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.delete_document", - "method": { - "fullName": "google.firestore.v1.Firestore.DeleteDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "DeleteDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.DeleteDocumentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_document" - }, - "description": "Sample for DeleteDocument", - "file": "firestore_v1_generated_firestore_delete_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_DeleteDocument_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_delete_document_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.get_document", - "method": { - "fullName": "google.firestore.v1.Firestore.GetDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "GetDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.GetDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "get_document" - }, - "description": "Sample for GetDocument", - "file": "firestore_v1_generated_firestore_get_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_GetDocument_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_get_document_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.get_document", - "method": { - "fullName": 
"google.firestore.v1.Firestore.GetDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "GetDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.GetDocumentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "get_document" - }, - "description": "Sample for GetDocument", - "file": "firestore_v1_generated_firestore_get_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_GetDocument_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_get_document_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.list_collection_ids", - "method": { - "fullName": "google.firestore.v1.Firestore.ListCollectionIds", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "ListCollectionIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.ListCollectionIdsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager", - "shortName": "list_collection_ids" - }, - "description": "Sample for ListCollectionIds", - "file": "firestore_v1_generated_firestore_list_collection_ids_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_ListCollectionIds_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_list_collection_ids_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.list_collection_ids", - "method": { - "fullName": "google.firestore.v1.Firestore.ListCollectionIds", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "ListCollectionIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.ListCollectionIdsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager", - "shortName": "list_collection_ids" - }, - "description": "Sample for ListCollectionIds", - "file": 
"firestore_v1_generated_firestore_list_collection_ids_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_ListCollectionIds_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_list_collection_ids_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.list_documents", - "method": { - "fullName": "google.firestore.v1.Firestore.ListDocuments", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "ListDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.ListDocumentsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager", - "shortName": "list_documents" - }, - "description": "Sample for ListDocuments", - "file": "firestore_v1_generated_firestore_list_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_ListDocuments_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_list_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.list_documents", - "method": { - "fullName": "google.firestore.v1.Firestore.ListDocuments", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "ListDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.ListDocumentsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager", - "shortName": "list_documents" - }, - "description": "Sample for ListDocuments", - "file": "firestore_v1_generated_firestore_list_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_ListDocuments_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_list_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.listen", - "method": { - "fullName": "google.firestore.v1.Firestore.Listen", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Listen" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.firestore_v1.types.ListenRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.ListenResponse]", - "shortName": "listen" - }, - "description": "Sample for Listen", - "file": "firestore_v1_generated_firestore_listen_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Listen_async", - "segments": [ - { - "end": 66, - "start": 27, - "type": "FULL" - }, - { - "end": 66, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 59, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 62, - "start": 60, - "type": "REQUEST_EXECUTION" - }, - { - "end": 67, - "start": 63, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_listen_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.listen", - "method": { - "fullName": "google.firestore.v1.Firestore.Listen", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Listen" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.firestore_v1.types.ListenRequest]" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.ListenResponse]", - "shortName": "listen" - }, - "description": "Sample for Listen", - "file": "firestore_v1_generated_firestore_listen_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Listen_sync", - "segments": [ - { - "end": 66, - "start": 27, - "type": "FULL" - }, - { - "end": 66, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 59, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 62, - "start": 60, - "type": "REQUEST_EXECUTION" - }, - { - "end": 67, - "start": 63, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_listen_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.partition_query", - "method": { - "fullName": "google.firestore.v1.Firestore.PartitionQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "firestore_v1_generated_firestore_partition_query_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_PartitionQuery_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_partition_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.partition_query", - "method": { - "fullName": "google.firestore.v1.Firestore.PartitionQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "firestore_v1_generated_firestore_partition_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_PartitionQuery_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 
48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_partition_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.rollback", - "method": { - "fullName": "google.firestore.v1.Firestore.Rollback", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RollbackRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "firestore_v1_generated_firestore_rollback_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Rollback_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_rollback_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.rollback", - "method": { - "fullName": 
"google.firestore.v1.Firestore.Rollback", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RollbackRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "firestore_v1_generated_firestore_rollback_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Rollback_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_rollback_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.run_aggregation_query", - "method": { - "fullName": "google.firestore.v1.Firestore.RunAggregationQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "RunAggregationQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RunAggregationQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]", - "shortName": "run_aggregation_query" - }, - "description": "Sample for RunAggregationQuery", - "file": "firestore_v1_generated_firestore_run_aggregation_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_RunAggregationQuery_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_run_aggregation_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.run_aggregation_query", - "method": { - "fullName": "google.firestore.v1.Firestore.RunAggregationQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "RunAggregationQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RunAggregationQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]", - "shortName": "run_aggregation_query" - }, - "description": "Sample for RunAggregationQuery", - "file": "firestore_v1_generated_firestore_run_aggregation_query_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_RunAggregationQuery_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_run_aggregation_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.run_query", - "method": { - "fullName": "google.firestore.v1.Firestore.RunQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "RunQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RunQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.RunQueryResponse]", - "shortName": "run_query" - }, - "description": "Sample for RunQuery", - "file": "firestore_v1_generated_firestore_run_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_RunQuery_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, 
- { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_run_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.run_query", - "method": { - "fullName": "google.firestore.v1.Firestore.RunQuery", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "RunQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.RunQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.RunQueryResponse]", - "shortName": "run_query" - }, - "description": "Sample for RunQuery", - "file": "firestore_v1_generated_firestore_run_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_RunQuery_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_run_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.update_document", - "method": { - "fullName": 
"google.firestore.v1.Firestore.UpdateDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "UpdateDocument" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_v1.types.UpdateDocumentRequest" - }, - { - "name": "document", - "type": "google.cloud.firestore_v1.types.Document" - }, - { - "name": "update_mask", - "type": "google.cloud.firestore_v1.types.DocumentMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "update_document" - }, - "description": "Sample for UpdateDocument", - "file": "firestore_v1_generated_firestore_update_document_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_UpdateDocument_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_update_document_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.update_document", - "method": { - "fullName": "google.firestore.v1.Firestore.UpdateDocument", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "UpdateDocument" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.firestore_v1.types.UpdateDocumentRequest" - }, - { - "name": "document", - "type": "google.cloud.firestore_v1.types.Document" - }, - { - "name": "update_mask", - "type": "google.cloud.firestore_v1.types.DocumentMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_v1.types.Document", - "shortName": "update_document" - }, - "description": "Sample for UpdateDocument", - "file": "firestore_v1_generated_firestore_update_document_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_UpdateDocument_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_update_document_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient", - "shortName": "FirestoreAsyncClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreAsyncClient.write", - "method": { - "fullName": "google.firestore.v1.Firestore.Write", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Write" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.firestore_v1.types.WriteRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "Iterable[google.cloud.firestore_v1.types.WriteResponse]", - "shortName": "write" - }, - "description": "Sample for Write", - "file": "firestore_v1_generated_firestore_write_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Write_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_write_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_v1.FirestoreClient", - "shortName": "FirestoreClient" - }, - "fullName": "google.cloud.firestore_v1.FirestoreClient.write", - "method": { - "fullName": "google.firestore.v1.Firestore.Write", - "service": { - "fullName": "google.firestore.v1.Firestore", - "shortName": "Firestore" - }, - "shortName": "Write" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.firestore_v1.types.WriteRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.firestore_v1.types.WriteResponse]", - "shortName": "write" - }, - "description": "Sample for Write", - "file": "firestore_v1_generated_firestore_write_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_Firestore_Write_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_write_sync.py" - } - ] -} diff --git a/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py b/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py deleted file mode 100644 index 1b23d34f11..0000000000 --- a/owl-bot-staging/firestore/v1/scripts/fixup_firestore_v1_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestoreCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), - 'batch_write': ('database', 'writes', 'labels', ), - 'begin_transaction': ('database', 'options', ), - 'commit': ('database', 'writes', 'transaction', ), - 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), - 'delete_document': ('name', 'current_document', ), - 'get_document': ('name', 'mask', 'transaction', 'read_time', ), - 'list_collection_ids': ('parent', 'page_size', 'page_token', 'read_time', ), - 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), - 'listen': ('database', 'add_target', 'remove_target', 'labels', ), - 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', 'read_time', ), - 'rollback': ('database', 'transaction', ), - 'run_aggregation_query': ('parent', 'structured_aggregation_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), - 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), - 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), - 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), - } - - def leave_Call(self, original: cst.Call, 
updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestoreCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/firestore/v1/setup.py b/owl-bot-staging/firestore/v1/setup.py deleted file mode 100644 index 074598b595..0000000000 --- a/owl-bot-staging/firestore/v1/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-firestore' - - -description = "Google Cloud Firestore API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/firestore/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-firestore" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 
3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt deleted file mode 100644 index b8a550c738..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.19.5 diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt b/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore/v1/tests/__init__.py b/owl-bot-staging/firestore/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore/v1/tests/unit/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py b/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py deleted file mode 100644 index 2ac61081c7..0000000000 --- a/owl-bot-staging/firestore/v1/tests/unit/gapic/firestore_v1/test_firestore.py +++ /dev/null @@ -1,9455 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from 
google.auth.exceptions import MutualTLSChannelError -from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient -from google.cloud.firestore_v1.services.firestore import FirestoreClient -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.services.firestore import transports -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FirestoreClient._get_default_mtls_endpoint(None) is None - assert FirestoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert FirestoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert FirestoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert FirestoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert FirestoreClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - FirestoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert FirestoreClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - 
assert FirestoreClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert FirestoreClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert FirestoreClient._get_client_cert_source(None, False) is None - assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert FirestoreClient._get_client_cert_source(None, True) is mock_default_cert_source - assert FirestoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) -@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = FirestoreClient._DEFAULT_UNIVERSE - default_endpoint = 
FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert FirestoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert FirestoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == FirestoreClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert FirestoreClient._get_api_endpoint(None, None, default_universe, "always") == FirestoreClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == FirestoreClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert FirestoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert FirestoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert FirestoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert FirestoreClient._get_universe_domain(None, None) == FirestoreClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - FirestoreClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize("client_class,transport_name", [ - (FirestoreClient, "grpc"), - (FirestoreAsyncClient, "grpc_asyncio"), - (FirestoreClient, "rest"), -]) -def test_firestore_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://firestore.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.FirestoreGrpcTransport, "grpc"), - (transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.FirestoreRestTransport, "rest"), -]) -def test_firestore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (FirestoreClient, "grpc"), - (FirestoreAsyncClient, "grpc_asyncio"), - 
(FirestoreClient, "rest"), -]) -def test_firestore_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://firestore.googleapis.com' - ) - - -def test_firestore_client_get_transport_class(): - transport = FirestoreClient.get_transport_class() - available_transports = [ - transports.FirestoreGrpcTransport, - transports.FirestoreRestTransport, - ] - assert transport in available_transports - - transport = FirestoreClient.get_transport_class("grpc") - assert transport == transports.FirestoreGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), -]) -@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) -@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) -def test_firestore_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(FirestoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FirestoreClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "true"), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "false"), - (FirestoreAsyncClient, 
transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (FirestoreClient, transports.FirestoreRestTransport, "rest", "true"), - (FirestoreClient, transports.FirestoreRestTransport, "rest", "false"), -]) -@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) -@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_firestore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. 
Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - FirestoreClient, FirestoreAsyncClient -]) -@mock.patch.object(FirestoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreClient)) -@mock.patch.object(FirestoreAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAsyncClient)) -def test_firestore_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - FirestoreClient, FirestoreAsyncClient -]) -@mock.patch.object(FirestoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreClient)) -@mock.patch.object(FirestoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAsyncClient)) -def test_firestore_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = FirestoreClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = 
FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), -]) -def test_firestore_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (FirestoreClient, transports.FirestoreRestTransport, "rest", None), -]) -def test_firestore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_firestore_client_client_options_from_dict(): - with mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = FirestoreClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_firestore_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=None, - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.GetDocumentRequest, - dict, -]) -def test_get_document(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is 
concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document( - name='name_value', - ) - response = client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.GetDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == 'name_value' - - -def test_get_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - client.get_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() - - -def test_get_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore.GetDocumentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - client.get_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document( - name='name_value', - )) - response = await client.get_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() - -@pytest.mark.asyncio -async def test_get_document_async(transport: str = 'grpc_asyncio', request_type=firestore.GetDocumentRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document.Document( - name='name_value', - )) - response = await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.GetDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_document_async_from_dict(): - await test_get_document_async(request_type=dict) - - -def test_get_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.GetDocumentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - call.return_value = document.Document() - client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.GetDocumentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_document), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.ListDocumentsRequest, - dict, -]) -def test_list_documents(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListDocumentsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.ListDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - client.list_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() - - -def test_list_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.ListDocumentsRequest( - parent='parent_value', - collection_id='collection_id_value', - page_token='page_token_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - client.list_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest( - parent='parent_value', - collection_id='collection_id_value', - page_token='page_token_value', - order_by='order_by_value', - ) - -@pytest.mark.asyncio -async def test_list_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() - -@pytest.mark.asyncio -async def test_list_documents_async(transport: str = 'grpc_asyncio', request_type=firestore.ListDocumentsRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.ListDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_documents_async_from_dict(): - await test_list_documents_async(request_type=dict) - - -def test_list_documents_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - - request.parent = 'parent_value' - request.collection_id = 'collection_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - call.return_value = firestore.ListDocumentsResponse() - client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value&collection_id=collection_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_documents_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - - request.parent = 'parent_value' - request.collection_id = 'collection_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListDocumentsResponse()) - await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value&collection_id=collection_id_value', - ) in kw['metadata'] - - -def test_list_documents_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token='abc', - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token='def', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token='ghi', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - ('collection_id', ''), - )), - ) - pager = client.list_documents(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) - for i in results) -def test_list_documents_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual 
call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token='abc', - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token='def', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token='ghi', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - pages = list(client.list_documents(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_documents_async_pager(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token='abc', - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token='def', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token='ghi', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_documents(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, document.Document) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_documents_async_pages(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token='abc', - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token='def', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token='ghi', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_documents(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - firestore.UpdateDocumentRequest, - dict, -]) -def test_update_document(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document( - name='name_value', - ) - response = client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.UpdateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gf_document.Document) - assert response.name == 'name_value' - - -def test_update_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - client.update_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() - - -def test_update_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.UpdateDocumentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - client.update_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest( - ) - -@pytest.mark.asyncio -async def test_update_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document( - name='name_value', - )) - response = await client.update_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() - -@pytest.mark.asyncio -async def test_update_document_async(transport: str = 'grpc_asyncio', request_type=firestore.UpdateDocumentRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document( - name='name_value', - )) - response = await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.UpdateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_document_async_from_dict(): - await test_update_document_async(request_type=dict) - - -def test_update_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.UpdateDocumentRequest() - - request.document.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - call.return_value = gf_document.Document() - client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'document.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.UpdateDocumentRequest() - - request.document.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document()) - await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'document.name=name_value', - ) in kw['metadata'] - - -def test_update_document_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gf_document.Document() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_document( - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = gf_document.Document(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = common.DocumentMask(field_paths=['field_paths_value']) - assert arg == mock_val - - -def test_update_document_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_document_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gf_document.Document()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_document( - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = gf_document.Document(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = common.DocumentMask(field_paths=['field_paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_document_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.DeleteDocumentRequest, - dict, -]) -def test_delete_document(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.DeleteDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - client.delete_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() - - -def test_delete_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.DeleteDocumentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - client.delete_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() - -@pytest.mark.asyncio -async def test_delete_document_async(transport: str = 'grpc_asyncio', request_type=firestore.DeleteDocumentRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.DeleteDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_document_async_from_dict(): - await test_delete_document_async(request_type=dict) - - -def test_delete_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.DeleteDocumentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - call.return_value = None - client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.DeleteDocumentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_document_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_document( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_document_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - firestore.DeleteDocumentRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_document_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_document( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_document_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_document( - firestore.DeleteDocumentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.BatchGetDocumentsRequest, - dict, -]) -def test_batch_get_documents(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - response = client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BatchGetDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -def test_batch_get_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - client.batch_get_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() - - -def test_batch_get_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BatchGetDocumentsRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - client.batch_get_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest( - database='database_value', - ) - -@pytest.mark.asyncio -async def test_batch_get_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) - response = await client.batch_get_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() - -@pytest.mark.asyncio -async def test_batch_get_documents_async(transport: str = 'grpc_asyncio', request_type=firestore.BatchGetDocumentsRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) - response = await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BatchGetDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -@pytest.mark.asyncio -async def test_batch_get_documents_async_from_dict(): - await test_batch_get_documents_async(request_type=dict) - - -def test_batch_get_documents_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.BatchGetDocumentsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_get_documents_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchGetDocumentsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.BatchGetDocumentsResponse()]) - await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.BeginTransactionRequest, - dict, -]) -def test_begin_transaction(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse( - transaction=b'transaction_blob', - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b'transaction_blob' - - -def test_begin_transaction_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() - - -def test_begin_transaction_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BeginTransactionRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest( - database='database_value', - ) - -@pytest.mark.asyncio -async def test_begin_transaction_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse( - transaction=b'transaction_blob', - )) - response = await client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() - -@pytest.mark.asyncio -async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=firestore.BeginTransactionRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse( - transaction=b'transaction_blob', - )) - response = await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b'transaction_blob' - - -@pytest.mark.asyncio -async def test_begin_transaction_async_from_dict(): - await test_begin_transaction_async(request_type=dict) - - -def test_begin_transaction_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.BeginTransactionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = firestore.BeginTransactionResponse() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BeginTransactionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse()) - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_begin_transaction_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.begin_transaction( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_begin_transaction_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - firestore.BeginTransactionRequest(), - database='database_value', - ) - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BeginTransactionResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.begin_transaction( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.begin_transaction( - firestore.BeginTransactionRequest(), - database='database_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.CommitRequest, - dict, -]) -def test_commit(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse( - ) - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -def test_commit_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() - - -def test_commit_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.CommitRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - client.commit(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest( - database='database_value', - ) - -@pytest.mark.asyncio -async def test_commit_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse( - )) - response = await client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() - -@pytest.mark.asyncio -async def test_commit_async(transport: str = 'grpc_asyncio', request_type=firestore.CommitRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse( - )) - response = await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -@pytest.mark.asyncio -async def test_commit_async_from_dict(): - await test_commit_async(request_type=dict) - - -def test_commit_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = firestore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse()) - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_commit_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.commit( - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].writes - mock_val = [gf_write.Write(update=document.Document(name='name_value'))] - assert arg == mock_val - - -def test_commit_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - firestore.CommitRequest(), - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.CommitResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.commit( - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].writes - mock_val = [gf_write.Write(update=document.Document(name='name_value'))] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_commit_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.commit( - firestore.CommitRequest(), - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.RollbackRequest, - dict, -]) -def test_rollback(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_rollback_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() - - -def test_rollback_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.RollbackRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - client.rollback(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest( - database='database_value', - ) - -@pytest.mark.asyncio -async def test_rollback_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() - -@pytest.mark.asyncio -async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=firestore.RollbackRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - - -def test_rollback_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = None - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_rollback_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - database='database_value', - transaction=b'transaction_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].transaction - mock_val = b'transaction_blob' - assert arg == mock_val - - -def test_rollback_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - firestore.RollbackRequest(), - database='database_value', - transaction=b'transaction_blob', - ) - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rollback( - database='database_value', - transaction=b'transaction_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].transaction - mock_val = b'transaction_blob' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rollback_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.rollback( - firestore.RollbackRequest(), - database='database_value', - transaction=b'transaction_blob', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.RunQueryRequest, - dict, -]) -def test_run_query(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.RunQueryResponse()]) - response = client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RunQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.RunQueryResponse) - - -def test_run_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() - - -def test_run_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.RunQueryRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - client.run_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_run_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) - response = await client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() - -@pytest.mark.asyncio -async def test_run_query_async(transport: str = 'grpc_asyncio', request_type=firestore.RunQueryRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) - response = await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RunQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.RunQueryResponse) - - -@pytest.mark.asyncio -async def test_run_query_async_from_dict(): - await test_run_query_async(request_type=dict) - - -def test_run_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - call.return_value = iter([firestore.RunQueryResponse()]) - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunQueryResponse()]) - await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.RunAggregationQueryRequest, - dict, -]) -def test_run_aggregation_query(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.RunAggregationQueryResponse()]) - response = client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RunAggregationQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.RunAggregationQueryResponse) - - -def test_run_aggregation_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest() - - -def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore.RunAggregationQueryRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - client.run_aggregation_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_run_aggregation_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) - response = await client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest() - -@pytest.mark.asyncio -async def test_run_aggregation_query_async(transport: str = 'grpc_asyncio', request_type=firestore.RunAggregationQueryRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) - response = await client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RunAggregationQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.RunAggregationQueryResponse) - - -@pytest.mark.asyncio -async def test_run_aggregation_query_async_from_dict(): - await test_run_aggregation_query_async(request_type=dict) - - -def test_run_aggregation_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunAggregationQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - call.return_value = iter([firestore.RunAggregationQueryResponse()]) - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_aggregation_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.RunAggregationQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.RunAggregationQueryResponse()]) - await client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.PartitionQueryRequest, - dict, -]) -def test_partition_query(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.PartitionQueryResponse( - next_page_token='next_page_token_value', - ) - response = client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.PartitionQueryPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_partition_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - client.partition_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() - - -def test_partition_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.PartitionQueryRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - client.partition_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest( - parent='parent_value', - page_token='page_token_value', - ) - -@pytest.mark.asyncio -async def test_partition_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse( - next_page_token='next_page_token_value', - )) - response = await client.partition_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() - -@pytest.mark.asyncio -async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=firestore.PartitionQueryRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse( - next_page_token='next_page_token_value', - )) - response = await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.PartitionQueryAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_partition_query_async_from_dict(): - await test_partition_query_async(request_type=dict) - - -def test_partition_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.PartitionQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = firestore.PartitionQueryResponse() - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.PartitionQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.PartitionQueryResponse()) - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_partition_query_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token='abc', - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token='def', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token='ghi', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.partition_query(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, query.Cursor) - for i in results) -def test_partition_query_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token='abc', - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token='def', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token='ghi', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - pages = list(client.partition_query(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_partition_query_async_pager(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token='abc', - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token='def', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token='ghi', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - async_pager = await client.partition_query(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, query.Cursor) - for i in responses) - - -@pytest.mark.asyncio -async def test_partition_query_async_pages(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token='abc', - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token='def', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token='ghi', - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.partition_query(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - firestore.WriteRequest, - dict, -]) -def test_write(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.WriteResponse()]) - response = client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. 
- for message in response: - assert isinstance(message, firestore.WriteResponse) - - -@pytest.mark.asyncio -async def test_write_async(transport: str = 'grpc_asyncio', request_type=firestore.WriteRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()]) - response = await client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.WriteResponse) - - -@pytest.mark.asyncio -async def test_write_async_from_dict(): - await test_write_async(request_type=dict) - - -@pytest.mark.parametrize("request_type", [ - firestore.ListenRequest, - dict, -]) -def test_listen(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.listen), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.ListenResponse()]) - response = client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.ListenResponse) - - -@pytest.mark.asyncio -async def test_listen_async(transport: str = 'grpc_asyncio', request_type=firestore.ListenRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.listen), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.ListenResponse()]) - response = await client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. 
- message = await response.read() - assert isinstance(message, firestore.ListenResponse) - - -@pytest.mark.asyncio -async def test_listen_async_from_dict(): - await test_listen_async(request_type=dict) - - -@pytest.mark.parametrize("request_type", [ - firestore.ListCollectionIdsRequest, - dict, -]) -def test_list_collection_ids(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse( - collection_ids=['collection_ids_value'], - next_page_token='next_page_token_value', - ) - response = client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.ListCollectionIdsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCollectionIdsPager) - assert response.collection_ids == ['collection_ids_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_collection_ids_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - client.list_collection_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() - - -def test_list_collection_ids_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.ListCollectionIdsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - client.list_collection_ids(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -@pytest.mark.asyncio -async def test_list_collection_ids_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse( - collection_ids=['collection_ids_value'], - next_page_token='next_page_token_value', - )) - response = await client.list_collection_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() - -@pytest.mark.asyncio -async def test_list_collection_ids_async(transport: str = 'grpc_asyncio', request_type=firestore.ListCollectionIdsRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse( - collection_ids=['collection_ids_value'], - next_page_token='next_page_token_value', - )) - response = await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.ListCollectionIdsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCollectionIdsAsyncPager) - assert response.collection_ids == ['collection_ids_value'] - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_from_dict(): - await test_list_collection_ids_async(request_type=dict) - - -def test_list_collection_ids_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - call.return_value = firestore.ListCollectionIdsResponse() - client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_collection_ids_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse()) - await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_collection_ids_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_collection_ids( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_collection_ids_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_collection_ids( - firestore.ListCollectionIdsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.ListCollectionIdsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_collection_ids( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_collection_ids( - firestore.ListCollectionIdsRequest(), - parent='parent_value', - ) - - -def test_list_collection_ids_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token='def', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token='ghi', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_collection_ids(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) -def test_list_collection_ids_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token='def', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token='ghi', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.list_collection_ids(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_collection_ids_async_pager(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token='def', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token='ghi', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_collection_ids(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_pages(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token='def', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token='ghi', - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_collection_ids(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - firestore.BatchWriteRequest, - dict, -]) -def test_batch_write(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BatchWriteResponse( - ) - response = client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BatchWriteResponse) - - -def test_batch_write_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - client.batch_write() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() - - -def test_batch_write_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BatchWriteRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - client.batch_write(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest( - database='database_value', - ) - -@pytest.mark.asyncio -async def test_batch_write_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse( - )) - response = await client.batch_write() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() - -@pytest.mark.asyncio -async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=firestore.BatchWriteRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse( - )) - response = await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchWriteResponse) - - -@pytest.mark.asyncio -async def test_batch_write_async_from_dict(): - await test_batch_write_async(request_type=dict) - - -def test_batch_write_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.BatchWriteRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value = firestore.BatchWriteResponse() - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_write_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchWriteRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore.BatchWriteResponse()) - await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.CreateDocumentRequest, - dict, -]) -def test_create_document(request_type, transport: str = 'grpc'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document( - name='name_value', - ) - response = client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.CreateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == 'name_value' - - -def test_create_document_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - client.create_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() - - -def test_create_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.CreateDocumentRequest( - parent='parent_value', - collection_id='collection_id_value', - document_id='document_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - client.create_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest( - parent='parent_value', - collection_id='collection_id_value', - document_id='document_id_value', - ) - -@pytest.mark.asyncio -async def test_create_document_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document( - name='name_value', - )) - response = await client.create_document() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() - -@pytest.mark.asyncio -async def test_create_document_async(transport: str = 'grpc_asyncio', request_type=firestore.CreateDocumentRequest): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document.Document( - name='name_value', - )) - response = await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.CreateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_document_async_from_dict(): - await test_create_document_async(request_type=dict) - - -def test_create_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - - request.parent = 'parent_value' - request.collection_id = 'collection_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - call.return_value = document.Document() - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value&collection_id=collection_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - - request.parent = 'parent_value' - request.collection_id = 'collection_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_document), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value&collection_id=collection_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore.GetDocumentRequest, - dict, -]) -def test_get_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = document.Document( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_document(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == 'name_value' - - -def test_get_document_rest_required_fields(request_type=firestore.GetDocumentRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("mask", "read_time", "transaction", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = document.Document() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_document(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(("mask", "readTime", "transaction", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_get_document") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_get_document") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) - - request = firestore.GetDocumentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = document.Document() - - client.get_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_document_rest_bad_request(transport: str = 'rest', request_type=firestore.GetDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_document(request) - - -def test_get_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.ListDocumentsRequest, - dict, -]) -def test_list_documents_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore.ListDocumentsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.ListDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_documents(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_documents_rest_required_fields(request_type=firestore.ListDocumentsRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_documents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("mask", "order_by", "page_size", "page_token", "read_time", "show_missing", "transaction", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.ListDocumentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ListDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_documents(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_documents_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(("mask", "orderBy", "pageSize", "pageToken", "readTime", "showMissing", "transaction", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_list_documents") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_list_documents") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.ListDocumentsResponse.to_json(firestore.ListDocumentsResponse()) - - request = firestore.ListDocumentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ListDocumentsResponse() - - client.list_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_documents_rest_bad_request(transport: str = 'rest', request_type=firestore.ListDocumentsRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_documents(request) - - -def test_list_documents_rest_pager(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token='abc', - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token='def', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token='ghi', - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(firestore.ListDocumentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'collection_id': 'sample5'} - - pager = client.list_documents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) - for i in results) - - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - firestore.UpdateDocumentRequest, - dict, -]) -def test_update_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'document': {'name': 
'projects/sample1/databases/sample2/documents/sample3/sample4'}} - request_init["document"] = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4', 'fields': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - 
result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gf_document.Document( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_document(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gf_document.Document) - assert response.name == 'name_value' - - -def test_update_document_rest_required_fields(request_type=firestore.UpdateDocumentRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("current_document", "mask", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gf_document.Document() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_document(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(("currentDocument", "mask", "updateMask", )) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_update_document") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_update_document") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.UpdateDocumentRequest.pb(firestore.UpdateDocumentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - 
"body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gf_document.Document.to_json(gf_document.Document()) - - request = firestore.UpdateDocumentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gf_document.Document() - - client.update_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_document_rest_bad_request(transport: str = 'rest', request_type=firestore.UpdateDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'document': {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_document(request) - - -def test_update_document_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = gf_document.Document() - - # get arguments that satisfy an http rule for this method - sample_request = {'document': {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{document.name=projects/*/databases/*/documents/*/**}" % client.transport._host, args[1]) - - -def test_update_document_rest_flattened_error(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name='name_value'), - update_mask=common.DocumentMask(field_paths=['field_paths_value']), - ) - - -def test_update_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.DeleteDocumentRequest, - dict, -]) -def test_delete_document_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_document(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_document_rest_required_fields(request_type=firestore.DeleteDocumentRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("current_document", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_document(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(("currentDocument", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_delete_document") as pre: - pre.assert_not_called() - pb_message = firestore.DeleteDocumentRequest.pb(firestore.DeleteDocumentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore.DeleteDocumentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.delete_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_document_rest_bad_request(transport: str = 'rest', request_type=firestore.DeleteDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/documents/sample3/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/documents/*/**}" % client.transport._host, args[1]) - - -def test_delete_document_rest_flattened_error(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - firestore.DeleteDocumentRequest(), - name='name_value', - ) - - -def test_delete_document_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.BatchGetDocumentsRequest, - dict, -]) -def test_batch_get_documents_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.BatchGetDocumentsResponse( - transaction=b'transaction_blob', - missing='missing_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BatchGetDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.batch_get_documents(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchGetDocumentsResponse) - assert response.transaction == b'transaction_blob' - - -def test_batch_get_documents_rest_required_fields(request_type=firestore.BatchGetDocumentsRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default 
values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.BatchGetDocumentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BatchGetDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.batch_get_documents(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_get_documents_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - 
unset_fields = transport.batch_get_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_batch_get_documents") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_batch_get_documents") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.BatchGetDocumentsRequest.pb(firestore.BatchGetDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.BatchGetDocumentsResponse.to_json(firestore.BatchGetDocumentsResponse()) - req.return_value._content = "[{}]".format(req.return_value._content) - - request = firestore.BatchGetDocumentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BatchGetDocumentsResponse() - - client.batch_get_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_get_documents_rest_bad_request(transport: str = 'rest', request_type=firestore.BatchGetDocumentsRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_documents(request) - - -def test_batch_get_documents_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.BeginTransactionRequest, - dict, -]) -def test_begin_transaction_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BeginTransactionResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b'transaction_blob' - - -def test_begin_transaction_rest_required_fields(request_type=firestore.BeginTransactionRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.BeginTransactionResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.begin_transaction(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_begin_transaction_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.begin_transaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_begin_transaction") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_begin_transaction") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.BeginTransactionRequest.pb(firestore.BeginTransactionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.BeginTransactionResponse.to_json(firestore.BeginTransactionResponse()) - - request = firestore.BeginTransactionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BeginTransactionResponse() - - client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_begin_transaction_rest_bad_request(transport: str = 'rest', request_type=firestore.BeginTransactionRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.begin_transaction(request) - - -def test_begin_transaction_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.BeginTransactionResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.begin_transaction(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:beginTransaction" % client.transport._host, args[1]) - - -def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.begin_transaction( - firestore.BeginTransactionRequest(), - database='database_value', - ) - - -def test_begin_transaction_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.CommitRequest, - dict, -]) -def test_commit_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.commit(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.CommitResponse) - - -def test_commit_rest_required_fields(request_type=firestore.CommitRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.commit(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_commit_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.commit._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_commit") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_commit") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.CommitResponse.to_json(firestore.CommitResponse()) - - request = firestore.CommitRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.CommitResponse() - - client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_commit_rest_bad_request(transport: str = 'rest', request_type=firestore.CommitRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.commit(request) - - -def test_commit_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.commit(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:commit" % client.transport._host, args[1]) - - -def test_commit_rest_flattened_error(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.commit( - firestore.CommitRequest(), - database='database_value', - writes=[gf_write.Write(update=document.Document(name='name_value'))], - ) - - -def test_commit_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.RollbackRequest, - dict, -]) -def test_rollback_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.rollback(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_rollback_rest_required_fields(request_type=firestore.RollbackRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request_init["transaction"] = b'' - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - jsonified_request["transaction"] = b'transaction_blob' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - assert "transaction" in jsonified_request - assert jsonified_request["transaction"] == b'transaction_blob' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.rollback(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rollback_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rollback._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "transaction", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_rollback") as pre: - pre.assert_not_called() - pb_message = firestore.RollbackRequest.pb(firestore.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = 
PreparedRequest() - - request = firestore.RollbackRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_rollback_rest_bad_request(transport: str = 'rest', request_type=firestore.RollbackRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.rollback(request) - - -def test_rollback_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - transaction=b'transaction_blob', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.rollback(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/databases/*}/documents:rollback" % client.transport._host, args[1]) - - -def test_rollback_rest_flattened_error(transport: str = 'rest'): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - firestore.RollbackRequest(), - database='database_value', - transaction=b'transaction_blob', - ) - - -def test_rollback_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.RunQueryRequest, - dict, -]) -def test_run_query_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/documents'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore.RunQueryResponse( - transaction=b'transaction_blob', - skipped_results=1633, - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.RunQueryResponse) - assert response.transaction == b'transaction_blob' - assert response.skipped_results == 1633 - - -def test_run_query_rest_required_fields(request_type=firestore.RunQueryRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.RunQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_run_query_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.run_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreRestInterceptor, "post_run_query") as post, \ - mock.patch.object(transports.FirestoreRestInterceptor, "pre_run_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore.RunQueryResponse.to_json(firestore.RunQueryResponse()) - req.return_value._content = "[{}]".format(req.return_value._content) - - request = firestore.RunQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.RunQueryResponse() - - client.run_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_query_rest_bad_request(transport: str = 'rest', request_type=firestore.RunQueryRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/documents'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_query(request) - - -def test_run_query_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore.RunAggregationQueryRequest, - dict, -]) -def test_run_aggregation_query_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/documents'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_aggregation_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.RunAggregationQueryResponse) - assert response.transaction == b'transaction_blob' - - -def test_run_aggregation_query_rest_required_fields(request_type=firestore.RunAggregationQueryRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
            # (continuation of test_run_aggregation_query_rest_required_fields:
            # a stub transcode result with a field-free URI forces every request
            # field to show up in query_params)
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = firestore.RunAggregationQueryResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)
            # Server-streaming REST responses arrive as a JSON array of messages.
            json_return_value = "[{}]".format(json_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            with mock.patch.object(response_value, 'iter_content') as iter_content:
                iter_content.return_value = iter(json_return_value)
                response = client.run_aggregation_query(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_run_aggregation_query_rest_unset_required_fields():
    # With an empty request, the only required field ("parent") intersected with
    # the fields carrying defaults (none) must be empty.
    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.run_aggregation_query._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_run_aggregation_query_rest_interceptors(null_interceptor):
    # Verify the pre/post interceptor hooks fire exactly once around the call,
    # both with and without an interceptor instance configured.
    transport = transports.FirestoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
        )
    client = FirestoreClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.FirestoreRestInterceptor, "post_run_aggregation_query") as post, \
         mock.patch.object(transports.FirestoreRestInterceptor, "pre_run_aggregation_query") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = firestore.RunAggregationQueryRequest.pb(firestore.RunAggregationQueryRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = firestore.RunAggregationQueryResponse.to_json(firestore.RunAggregationQueryResponse())
        req.return_value._content = "[{}]".format(req.return_value._content)

        request = firestore.RunAggregationQueryRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = firestore.RunAggregationQueryResponse()

        client.run_aggregation_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_run_aggregation_query_rest_bad_request(transport: str = 'rest', request_type=firestore.RunAggregationQueryRequest):
    # An HTTP 400 from the server must surface as core_exceptions.BadRequest.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.run_aggregation_query(request)


def test_run_aggregation_query_rest_error():
    # Smoke test: constructing a REST client must not raise.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    firestore.PartitionQueryRequest,
    dict,
])
def test_partition_query_rest(request_type):
    # Happy path: a mocked HTTP response is decoded into a PartitionQueryPager.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = firestore.PartitionQueryResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = firestore.PartitionQueryResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.partition_query(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.PartitionQueryPager)
    assert response.next_page_token == 'next_page_token_value'


def test_partition_query_rest_required_fields(request_type=firestore.PartitionQueryRequest):
    # Exercise the required-field plumbing: "parent" must survive transcoding.
    transport_class = transports.FirestoreRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = firestore.PartitionQueryResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = firestore.PartitionQueryResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.partition_query(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_partition_query_rest_unset_required_fields():
    # With an empty request, the only required field ("parent") intersected with
    # the fields carrying defaults (none) must be empty.
    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.partition_query._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_partition_query_rest_interceptors(null_interceptor):
    # Verify the pre/post interceptor hooks fire exactly once around the call,
    # both with and without an interceptor instance configured.
    transport = transports.FirestoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
        )
    client = FirestoreClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.FirestoreRestInterceptor, "post_partition_query") as post, \
         mock.patch.object(transports.FirestoreRestInterceptor, "pre_partition_query") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = firestore.PartitionQueryRequest.pb(firestore.PartitionQueryRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = firestore.PartitionQueryResponse.to_json(firestore.PartitionQueryResponse())

        request = firestore.PartitionQueryRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = firestore.PartitionQueryResponse()

        client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_partition_query_rest_bad_request(transport: str = 'rest', request_type=firestore.PartitionQueryRequest):
    # An HTTP 400 from the server must surface as core_exceptions.BadRequest.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.partition_query(request)


def test_partition_query_rest_pager(transport: str = 'rest'):
    # The pager must walk all pages (3+1 cursors, empty, 1, 2 = 6 items) and
    # flatten the partition cursors across them.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            firestore.PartitionQueryResponse(
                partitions=[
                    query.Cursor(),
                    query.Cursor(),
                    query.Cursor(),
                ],
                next_page_token='abc',
            ),
            firestore.PartitionQueryResponse(
                partitions=[],
                next_page_token='def',
            ),
            firestore.PartitionQueryResponse(
                partitions=[
                    query.Cursor(),
                ],
                next_page_token='ghi',
            ),
            firestore.PartitionQueryResponse(
                partitions=[
                    query.Cursor(),
                    query.Cursor(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(firestore.PartitionQueryResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/databases/sample2/documents'}

        pager = client.partition_query(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, query.Cursor)
                   for i in results)

        pages = list(client.partition_query(request=sample_request).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_write_rest_unimplemented():
    # Client-streaming Write is not supported over REST transport.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = firestore.WriteRequest()
    requests = [request]
    with pytest.raises(NotImplementedError):
        client.write(requests)


def test_listen_rest_unimplemented():
    # Bidirectional-streaming Listen is not supported over REST transport.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = firestore.ListenRequest()
    requests = [request]
    with pytest.raises(NotImplementedError):
        client.listen(requests)

@pytest.mark.parametrize("request_type", [
    firestore.ListCollectionIdsRequest,
    dict,
])
def test_list_collection_ids_rest(request_type):
    # Happy path: a mocked HTTP response is decoded into a ListCollectionIdsPager.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = firestore.ListCollectionIdsResponse(
            collection_ids=['collection_ids_value'],
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = firestore.ListCollectionIdsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list_collection_ids(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListCollectionIdsPager)
    assert response.collection_ids == ['collection_ids_value']
    assert response.next_page_token == 'next_page_token_value'


def test_list_collection_ids_rest_required_fields(request_type=firestore.ListCollectionIdsRequest):
    # Exercise the required-field plumbing: "parent" must survive transcoding.
    transport_class = transports.FirestoreRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_collection_ids._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_collection_ids._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = firestore.ListCollectionIdsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = firestore.ListCollectionIdsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_collection_ids(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_collection_ids_rest_unset_required_fields():
    # With an empty request, the only required field ("parent") intersected with
    # the fields carrying defaults (none) must be empty.
    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_collection_ids._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_collection_ids_rest_interceptors(null_interceptor):
    # Verify the pre/post interceptor hooks fire exactly once around the call,
    # both with and without an interceptor instance configured.
    transport = transports.FirestoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
        )
    client = FirestoreClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.FirestoreRestInterceptor, "post_list_collection_ids") as post, \
         mock.patch.object(transports.FirestoreRestInterceptor, "pre_list_collection_ids") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = firestore.ListCollectionIdsRequest.pb(firestore.ListCollectionIdsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = firestore.ListCollectionIdsResponse.to_json(firestore.ListCollectionIdsResponse())

        request = firestore.ListCollectionIdsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = firestore.ListCollectionIdsResponse()

        client.list_collection_ids(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_collection_ids_rest_bad_request(transport: str = 'rest', request_type=firestore.ListCollectionIdsRequest):
    # An HTTP 400 from the server must surface as core_exceptions.BadRequest.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_collection_ids(request)


def test_list_collection_ids_rest_flattened():
    # Flattened (keyword-argument) call must build a request hitting the
    # expected URI template.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = firestore.ListCollectionIdsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'projects/sample1/databases/sample2/documents'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = firestore.ListCollectionIdsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_collection_ids(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/databases/*/documents}:listCollectionIds" % client.transport._host, args[1])


def test_list_collection_ids_rest_flattened_error(transport: str = 'rest'):
    # Passing both a request object and flattened fields is a ValueError.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_collection_ids(
            firestore.ListCollectionIdsRequest(),
            parent='parent_value',
        )


def test_list_collection_ids_rest_pager(transport: str = 'rest'):
    # The pager must walk all pages (3, 0, 1, 2 ids = 6 items) and flatten the
    # collection ids across them.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            firestore.ListCollectionIdsResponse(
                collection_ids=[
                    str(),
                    str(),
                    str(),
                ],
                next_page_token='abc',
            ),
            firestore.ListCollectionIdsResponse(
                collection_ids=[],
                next_page_token='def',
            ),
            firestore.ListCollectionIdsResponse(
                collection_ids=[
                    str(),
                ],
                next_page_token='ghi',
            ),
            firestore.ListCollectionIdsResponse(
                collection_ids=[
                    str(),
                    str(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(firestore.ListCollectionIdsResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/databases/sample2/documents'}

        pager = client.list_collection_ids(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, str)
                   for i in results)

        pages = list(client.list_collection_ids(request=sample_request).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    firestore.BatchWriteRequest,
    dict,
])
def test_batch_write_rest(request_type):
    # Happy path: a mocked HTTP response is decoded into a BatchWriteResponse.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'database': 'projects/sample1/databases/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = firestore.BatchWriteResponse(
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = firestore.BatchWriteResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.batch_write(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, firestore.BatchWriteResponse)


def test_batch_write_rest_required_fields(request_type=firestore.BatchWriteRequest):
    # Exercise the required-field plumbing: "database" must survive transcoding.
    transport_class = transports.FirestoreRestTransport

    request_init = {}
    request_init["database"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["database"] = 'database_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "database" in jsonified_request
    assert jsonified_request["database"] == 'database_value'

    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = firestore.BatchWriteResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = firestore.BatchWriteResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.batch_write(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_batch_write_rest_unset_required_fields():
    # With an empty request, the only required field ("database") intersected
    # with the fields carrying defaults (none) must be empty.
    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.batch_write._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("database", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_batch_write_rest_interceptors(null_interceptor):
    # Verify the pre/post interceptor hooks fire exactly once around the call,
    # both with and without an interceptor instance configured.
    transport = transports.FirestoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
        )
    client = FirestoreClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.FirestoreRestInterceptor, "post_batch_write") as post, \
         mock.patch.object(transports.FirestoreRestInterceptor, "pre_batch_write") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = firestore.BatchWriteResponse.to_json(firestore.BatchWriteResponse())

        request = firestore.BatchWriteRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = firestore.BatchWriteResponse()

        client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_batch_write_rest_bad_request(transport: str = 'rest', request_type=firestore.BatchWriteRequest):
    # An HTTP 400 from the server must surface as core_exceptions.BadRequest.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'database': 'projects/sample1/databases/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.batch_write(request)


def test_batch_write_rest_error():
    # Smoke test: constructing a REST client must not raise.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    firestore.CreateDocumentRequest,
    dict,
])
def test_create_document_rest(request_type):
    # Happy path: a mocked HTTP response is decoded into a Document. The sample
    # request body is first pruned of fields unknown to the runtime dependency.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3', 'collection_id': 'sample4'}
    request_init["document"] = {'name': 'name_value', 'fields': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = firestore.CreateDocumentRequest.meta.fields["document"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["document"].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["document"][field])):
                    del request_init["document"][field][i][subfield]
            else:
                del request_init["document"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = document.Document(
            name='name_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = document.Document.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_document(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, document.Document)
    assert response.name == 'name_value'


def test_create_document_rest_required_fields(request_type=firestore.CreateDocumentRequest):
    # Exercise the required-field plumbing: "parent" and "collection_id" must
    # survive transcoding; "document_id" and "mask" are optional query params.
    transport_class = transports.FirestoreRestTransport

    request_init = {}
    request_init["parent"] = ""
    request_init["collection_id"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'
    jsonified_request["collectionId"] = 'collection_id_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("document_id", "mask", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'
    assert "collectionId" in jsonified_request
    assert jsonified_request["collectionId"] == 'collection_id_value'

    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = document.Document()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = document.Document.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_document(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_create_document_rest_unset_required_fields():
    # Optional query params ("documentId", "mask") intersected with the
    # required set ("parent", "collectionId", "document") must be empty.
    transport = transports.FirestoreRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.create_document._get_unset_required_fields({})
    assert set(unset_fields) == (set(("documentId", "mask", )) & set(("parent", "collectionId", "document", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_document_rest_interceptors(null_interceptor):
    # Verify the pre/post interceptor hooks fire exactly once around the call,
    # both with and without an interceptor instance configured.
    transport = transports.FirestoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(),
        )
    client = FirestoreClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.FirestoreRestInterceptor, "post_create_document") as post, \
         mock.patch.object(transports.FirestoreRestInterceptor, "pre_create_document") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = firestore.CreateDocumentRequest.pb(firestore.CreateDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = document.Document.to_json(document.Document())

        request = firestore.CreateDocumentRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = document.Document()

        client.create_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_create_document_rest_bad_request(transport: str = 'rest', request_type=firestore.CreateDocumentRequest):
    # An HTTP 400 from the server must surface as core_exceptions.BadRequest.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/databases/sample2/documents/sample3', 'collection_id': 'sample4'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.create_document(request)


def test_create_document_rest_error():
    # Smoke test: constructing a REST client must not raise.
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    # It is an error to provide credentials and a transport instance.
    transport = transports.FirestoreGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = FirestoreClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
- transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = FirestoreClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FirestoreGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - transports.FirestoreRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = FirestoreClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.FirestoreGrpcTransport, - ) - -def test_firestore_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.FirestoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_firestore_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.FirestoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'get_document', - 'list_documents', - 'update_document', - 'delete_document', - 'batch_get_documents', - 'begin_transaction', - 'commit', - 'rollback', - 'run_query', - 'run_aggregation_query', - 'partition_query', - 'write', - 'listen', - 'list_collection_ids', - 'batch_write', - 'create_document', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_firestore_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id="octopus", - ) - - -def test_firestore_base_transport_with_adc(): - # Test the default credentials are used if 
credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreTransport() - adc.assert_called_once() - - -def test_firestore_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirestoreClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - ], -) -def test_firestore_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - transports.FirestoreRestTransport, - ], -) -def test_firestore_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.FirestoreGrpcTransport, grpc_helpers), - (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_firestore_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=["1", "2"], - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport]) -def test_firestore_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_firestore_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.FirestoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_firestore_host_no_port(transport_name): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://firestore.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_firestore_host_with_port(transport_name): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'firestore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://firestore.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def 
test_firestore_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = FirestoreClient( - credentials=creds1, - transport=transport_name, - ) - client2 = FirestoreClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_document._session - session2 = client2.transport.get_document._session - assert session1 != session2 - session1 = client1.transport.list_documents._session - session2 = client2.transport.list_documents._session - assert session1 != session2 - session1 = client1.transport.update_document._session - session2 = client2.transport.update_document._session - assert session1 != session2 - session1 = client1.transport.delete_document._session - session2 = client2.transport.delete_document._session - assert session1 != session2 - session1 = client1.transport.batch_get_documents._session - session2 = client2.transport.batch_get_documents._session - assert session1 != session2 - session1 = client1.transport.begin_transaction._session - session2 = client2.transport.begin_transaction._session - assert session1 != session2 - session1 = client1.transport.commit._session - session2 = client2.transport.commit._session - assert session1 != session2 - session1 = client1.transport.rollback._session - session2 = client2.transport.rollback._session - assert session1 != session2 - session1 = client1.transport.run_query._session - session2 = client2.transport.run_query._session - assert session1 != session2 - session1 = client1.transport.run_aggregation_query._session - session2 = client2.transport.run_aggregation_query._session - assert session1 != session2 - session1 = client1.transport.partition_query._session - session2 = client2.transport.partition_query._session - assert session1 != session2 - session1 = client1.transport.write._session - session2 = client2.transport.write._session - assert session1 != session2 - session1 = 
client1.transport.listen._session - session2 = client2.transport.listen._session - assert session1 != session2 - session1 = client1.transport.list_collection_ids._session - session2 = client2.transport.list_collection_ids._session - assert session1 != session2 - session1 = client1.transport.batch_write._session - session2 = client2.transport.batch_write._session - assert session1 != session2 - session1 = client1.transport.create_document._session - session2 = client2.transport.create_document._session - assert session1 != session2 -def test_firestore_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_firestore_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport])
def test_firestore_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """mTLS via the deprecated api_mtls_endpoint/client_cert_source kwargs.

    The transport must (a) warn about the deprecated arguments, (b) build SSL
    channel credentials from the callback's cert/key payload, and (c) open its
    channel against the mTLS endpoint with those credentials.
    """
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as ssl_cred_factory, \
            mock.patch.object(transport_class, "create_channel") as channel_factory:
        fake_ssl_creds = mock.Mock()
        fake_channel = mock.Mock()
        ssl_cred_factory.return_value = fake_ssl_creds
        channel_factory.return_value = fake_channel

        anon_creds = ga_credentials.AnonymousCredentials()
        # Constructing with the deprecated kwargs must still emit a warning,
        # and credentials fall through to ADC.
        with pytest.warns(DeprecationWarning), \
                mock.patch.object(google.auth, 'default') as adc:
            adc.return_value = (anon_creds, None)
            transport = transport_class(
                host="squid.clam.whelk",
                api_mtls_endpoint="mtls.squid.clam.whelk",
                client_cert_source=client_cert_source_callback,
            )
            adc.assert_called_once()

        # The cert-source callback's payload feeds the SSL credentials.
        ssl_cred_factory.assert_called_once_with(
            certificate_chain=b"cert bytes", private_key=b"key bytes"
        )
        # The channel targets the mTLS endpoint (default port appended) and
        # carries the mocked SSL credentials plus the message-size options.
        channel_factory.assert_called_once_with(
            "mtls.squid.clam.whelk:443",
            credentials=anon_creds,
            credentials_file=None,
            scopes=None,
            ssl_credentials=fake_ssl_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
        assert transport.grpc_channel == fake_channel
        assert transport._ssl_channel_credentials == fake_ssl_creds


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport])
def test_firestore_transport_channel_mtls_with_adc(
    transport_class
):
    """mTLS channel creation uses ADC-derived SSL credentials when no cert source is given."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel


def test_common_billing_account_path():
    """Billing-account resource path is rendered from its template."""
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = FirestoreClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    """Parsing inverts common_billing_account_path."""
    expected = {
        "billing_account": "clam",
    }
    path = FirestoreClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = FirestoreClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    """Folder resource path is rendered from its template."""
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = FirestoreClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    """Parsing inverts common_folder_path."""
    expected = {
        "folder": "octopus",
    }
    path = FirestoreClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = FirestoreClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    """Organization resource path is rendered from its template."""
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = FirestoreClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    """Parsing inverts common_organization_path."""
    expected = {
        "organization": "nudibranch",
    }
    path = FirestoreClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = FirestoreClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    """Project resource path is rendered from its template."""
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = FirestoreClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    """Parsing inverts common_project_path."""
    expected = {
        "project": "mussel",
    }
    path = FirestoreClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = FirestoreClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    """Location resource path is rendered from its template."""
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = FirestoreClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    """Parsing inverts common_location_path."""
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = FirestoreClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = FirestoreClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """A supplied ClientInfo reaches _prep_wrapped_messages on client and transport."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.FirestoreTransport, '_prep_wrapped_messages') as prep:
        client = FirestoreClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.FirestoreTransport, '_prep_wrapped_messages') as prep:
        transport_class = FirestoreClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

@pytest.mark.asyncio
async def test_transport_close_async():
    """Leaving the async client's context closes its gRPC channel exactly once."""
    client = FirestoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()


def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest):
    """HTTP 400 on CancelOperation surfaces as core_exceptions.BadRequest."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.cancel_operation(request)

@pytest.mark.parametrize("request_type", [
    operations_pb2.CancelOperationRequest,
    dict,
])
def test_cancel_operation_rest(request_type):
    """Successful CancelOperation over REST returns None."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = '{}'

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.cancel_operation(request)

        # Establish that the response is the type that we expect.
        assert response is None

def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest):
    """HTTP 400 on DeleteOperation surfaces as core_exceptions.BadRequest."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_operation(request)

@pytest.mark.parametrize("request_type", [
    operations_pb2.DeleteOperationRequest,
    dict,
])
def test_delete_operation_rest(request_type):
    """Successful DeleteOperation over REST returns None."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = '{}'

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.delete_operation(request)

        # Establish that the response is the type that we expect.
        assert response is None

def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest):
    """HTTP 400 on GetOperation surfaces as core_exceptions.BadRequest."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)

@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """Successful GetOperation over REST returns an Operation message."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.get_operation(request)

        # Establish that the response is the type that we expect.
        assert isinstance(response, operations_pb2.Operation)

def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest):
    """HTTP 400 on ListOperations surfaces as core_exceptions.BadRequest."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_operations(request)

@pytest.mark.parametrize("request_type", [
    operations_pb2.ListOperationsRequest,
    dict,
])
def test_list_operations_rest(request_type):
    """Successful ListOperations over REST returns a ListOperationsResponse."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1/databases/sample2'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.ListOperationsResponse()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.list_operations(request)

        # Establish that the response is the type that we expect.
        assert isinstance(response, operations_pb2.ListOperationsResponse)


def test_delete_operation(transport: str = "grpc"):
    """DeleteOperation over gRPC forwards the request and returns None."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.DeleteOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None
@pytest.mark.asyncio
async def test_delete_operation_async(transport: str = "grpc_asyncio"):
    """Async DeleteOperation forwards the request and returns None."""
    client = FirestoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.DeleteOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            None
        )
        response = await client.delete_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None

def test_delete_operation_field_headers():
    """DeleteOperation sends request.name as an x-goog-request-params header."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.DeleteOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
        call.return_value = None

        client.delete_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_operation_field_headers_async():
    """Async DeleteOperation sends request.name as an x-goog-request-params header."""
    client = FirestoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.DeleteOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            None
        )
        await client.delete_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]

def test_delete_operation_from_dict():
    """DeleteOperation accepts a plain dict in place of a request message."""
    client = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.delete_operation(
            request={
                "name": "locations",
            }
        )
        call.assert_called()
@pytest.mark.asyncio
async def test_delete_operation_from_dict_async():
    client = FirestoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = FirestoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (FirestoreClient, transports.FirestoreGrpcTransport), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - 
client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/firestore_admin/v1/.coveragerc b/owl-bot-staging/firestore_admin/v1/.coveragerc deleted file mode 100644 index 2f9de152b2..0000000000 --- a/owl-bot-staging/firestore_admin/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/firestore_admin/__init__.py - google/cloud/firestore_admin/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/firestore_admin/v1/.flake8 b/owl-bot-staging/firestore_admin/v1/.flake8 deleted file mode 100644 index 29227d4cf4..0000000000 --- a/owl-bot-staging/firestore_admin/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/firestore_admin/v1/MANIFEST.in b/owl-bot-staging/firestore_admin/v1/MANIFEST.in deleted file mode 100644 index b3d50a1888..0000000000 --- a/owl-bot-staging/firestore_admin/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/firestore_admin *.py -recursive-include google/cloud/firestore_admin_v1 *.py diff --git a/owl-bot-staging/firestore_admin/v1/README.rst b/owl-bot-staging/firestore_admin/v1/README.rst deleted file mode 100644 index 8704400bdb..0000000000 --- a/owl-bot-staging/firestore_admin/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Firestore Admin API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Firestore Admin API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css b/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b5..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/firestore_admin/v1/docs/conf.py b/owl-bot-staging/firestore_admin/v1/docs/conf.py deleted file mode 100644 index 5a3c49c61d..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-firestore-admin documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-firestore-admin" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-firestore-admin-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-firestore-admin.tex", - u"google-cloud-firestore-admin Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-firestore-admin", - u"Google Cloud Firestore Admin Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-firestore-admin", - u"google-cloud-firestore-admin Documentation", - author, - "google-cloud-firestore-admin", - "GAPIC library for Google Cloud Firestore Admin API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst 
b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst deleted file mode 100644 index 59a8ccdfb5..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/firestore_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -FirestoreAdmin --------------------------------- - -.. automodule:: google.cloud.firestore_admin_v1.services.firestore_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.firestore_admin_v1.services.firestore_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst deleted file mode 100644 index 24782d194c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Firestore Admin v1 API -================================================ -.. toctree:: - :maxdepth: 2 - - firestore_admin diff --git a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst b/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst deleted file mode 100644 index 9396fc9eaf..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/firestore_admin_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Firestore Admin v1 API -============================================= - -.. automodule:: google.cloud.firestore_admin_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/firestore_admin/v1/docs/index.rst b/owl-bot-staging/firestore_admin/v1/docs/index.rst deleted file mode 100644 index 701eea6e53..0000000000 --- a/owl-bot-staging/firestore_admin/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - firestore_admin_v1/services - firestore_admin_v1/types diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py deleted file mode 100644 index 02f3da2116..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/__init__.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.firestore_admin import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.firestore_admin_v1.services.firestore_admin.client import FirestoreAdminClient -from google.cloud.firestore_admin_v1.services.firestore_admin.async_client import FirestoreAdminAsyncClient - -from google.cloud.firestore_admin_v1.types.backup import Backup -from google.cloud.firestore_admin_v1.types.database import Database -from google.cloud.firestore_admin_v1.types.field import Field -from google.cloud.firestore_admin_v1.types.firestore_admin import CreateBackupScheduleRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import CreateDatabaseMetadata -from google.cloud.firestore_admin_v1.types.firestore_admin import CreateDatabaseRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import CreateIndexRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteBackupRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteBackupScheduleRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteDatabaseMetadata -from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteDatabaseRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import DeleteIndexRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ExportDocumentsRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import GetBackupRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import GetBackupScheduleRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import GetDatabaseRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import GetFieldRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import GetIndexRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ImportDocumentsRequest -from 
google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupSchedulesRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupSchedulesResponse -from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupsRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ListBackupsResponse -from google.cloud.firestore_admin_v1.types.firestore_admin import ListDatabasesRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ListDatabasesResponse -from google.cloud.firestore_admin_v1.types.firestore_admin import ListFieldsRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ListFieldsResponse -from google.cloud.firestore_admin_v1.types.firestore_admin import ListIndexesRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import ListIndexesResponse -from google.cloud.firestore_admin_v1.types.firestore_admin import RestoreDatabaseRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateBackupScheduleRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateDatabaseMetadata -from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateDatabaseRequest -from google.cloud.firestore_admin_v1.types.firestore_admin import UpdateFieldRequest -from google.cloud.firestore_admin_v1.types.index import Index -from google.cloud.firestore_admin_v1.types.location import LocationMetadata -from google.cloud.firestore_admin_v1.types.operation import ExportDocumentsMetadata -from google.cloud.firestore_admin_v1.types.operation import ExportDocumentsResponse -from google.cloud.firestore_admin_v1.types.operation import FieldOperationMetadata -from google.cloud.firestore_admin_v1.types.operation import ImportDocumentsMetadata -from google.cloud.firestore_admin_v1.types.operation import IndexOperationMetadata -from google.cloud.firestore_admin_v1.types.operation import Progress -from 
google.cloud.firestore_admin_v1.types.operation import RestoreDatabaseMetadata -from google.cloud.firestore_admin_v1.types.operation import OperationState -from google.cloud.firestore_admin_v1.types.schedule import BackupSchedule -from google.cloud.firestore_admin_v1.types.schedule import DailyRecurrence -from google.cloud.firestore_admin_v1.types.schedule import WeeklyRecurrence - -__all__ = ('FirestoreAdminClient', - 'FirestoreAdminAsyncClient', - 'Backup', - 'Database', - 'Field', - 'CreateBackupScheduleRequest', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'CreateIndexRequest', - 'DeleteBackupRequest', - 'DeleteBackupScheduleRequest', - 'DeleteDatabaseMetadata', - 'DeleteDatabaseRequest', - 'DeleteIndexRequest', - 'ExportDocumentsRequest', - 'GetBackupRequest', - 'GetBackupScheduleRequest', - 'GetDatabaseRequest', - 'GetFieldRequest', - 'GetIndexRequest', - 'ImportDocumentsRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListFieldsRequest', - 'ListFieldsResponse', - 'ListIndexesRequest', - 'ListIndexesResponse', - 'RestoreDatabaseRequest', - 'UpdateBackupScheduleRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'UpdateFieldRequest', - 'Index', - 'LocationMetadata', - 'ExportDocumentsMetadata', - 'ExportDocumentsResponse', - 'FieldOperationMetadata', - 'ImportDocumentsMetadata', - 'IndexOperationMetadata', - 'Progress', - 'RestoreDatabaseMetadata', - 'OperationState', - 'BackupSchedule', - 'DailyRecurrence', - 'WeeklyRecurrence', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py deleted file mode 100644 index 558c8aab67..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# 
Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed deleted file mode 100644 index f7a4796eee..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore-admin package uses inline types. diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py deleted file mode 100644 index d26d37256c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.firestore_admin import FirestoreAdminClient -from .services.firestore_admin import FirestoreAdminAsyncClient - -from .types.backup import Backup -from .types.database import Database -from .types.field import Field -from .types.firestore_admin import CreateBackupScheduleRequest -from .types.firestore_admin import CreateDatabaseMetadata -from .types.firestore_admin import CreateDatabaseRequest -from .types.firestore_admin import CreateIndexRequest -from .types.firestore_admin import DeleteBackupRequest -from .types.firestore_admin import DeleteBackupScheduleRequest -from .types.firestore_admin import DeleteDatabaseMetadata -from .types.firestore_admin import DeleteDatabaseRequest -from .types.firestore_admin import DeleteIndexRequest -from .types.firestore_admin import ExportDocumentsRequest -from .types.firestore_admin import GetBackupRequest -from .types.firestore_admin import GetBackupScheduleRequest -from .types.firestore_admin import GetDatabaseRequest -from .types.firestore_admin import GetFieldRequest -from .types.firestore_admin import GetIndexRequest -from .types.firestore_admin import ImportDocumentsRequest -from .types.firestore_admin import ListBackupSchedulesRequest -from .types.firestore_admin import ListBackupSchedulesResponse -from .types.firestore_admin import ListBackupsRequest -from .types.firestore_admin import ListBackupsResponse -from .types.firestore_admin import ListDatabasesRequest -from .types.firestore_admin import ListDatabasesResponse -from .types.firestore_admin import ListFieldsRequest -from .types.firestore_admin import ListFieldsResponse -from .types.firestore_admin import ListIndexesRequest -from .types.firestore_admin import ListIndexesResponse -from .types.firestore_admin import 
RestoreDatabaseRequest -from .types.firestore_admin import UpdateBackupScheduleRequest -from .types.firestore_admin import UpdateDatabaseMetadata -from .types.firestore_admin import UpdateDatabaseRequest -from .types.firestore_admin import UpdateFieldRequest -from .types.index import Index -from .types.location import LocationMetadata -from .types.operation import ExportDocumentsMetadata -from .types.operation import ExportDocumentsResponse -from .types.operation import FieldOperationMetadata -from .types.operation import ImportDocumentsMetadata -from .types.operation import IndexOperationMetadata -from .types.operation import Progress -from .types.operation import RestoreDatabaseMetadata -from .types.operation import OperationState -from .types.schedule import BackupSchedule -from .types.schedule import DailyRecurrence -from .types.schedule import WeeklyRecurrence - -__all__ = ( - 'FirestoreAdminAsyncClient', -'Backup', -'BackupSchedule', -'CreateBackupScheduleRequest', -'CreateDatabaseMetadata', -'CreateDatabaseRequest', -'CreateIndexRequest', -'DailyRecurrence', -'Database', -'DeleteBackupRequest', -'DeleteBackupScheduleRequest', -'DeleteDatabaseMetadata', -'DeleteDatabaseRequest', -'DeleteIndexRequest', -'ExportDocumentsMetadata', -'ExportDocumentsRequest', -'ExportDocumentsResponse', -'Field', -'FieldOperationMetadata', -'FirestoreAdminClient', -'GetBackupRequest', -'GetBackupScheduleRequest', -'GetDatabaseRequest', -'GetFieldRequest', -'GetIndexRequest', -'ImportDocumentsMetadata', -'ImportDocumentsRequest', -'Index', -'IndexOperationMetadata', -'ListBackupSchedulesRequest', -'ListBackupSchedulesResponse', -'ListBackupsRequest', -'ListBackupsResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'ListFieldsRequest', -'ListFieldsResponse', -'ListIndexesRequest', -'ListIndexesResponse', -'LocationMetadata', -'OperationState', -'Progress', -'RestoreDatabaseMetadata', -'RestoreDatabaseRequest', -'UpdateBackupScheduleRequest', -'UpdateDatabaseMetadata', 
-'UpdateDatabaseRequest', -'UpdateFieldRequest', -'WeeklyRecurrence', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json deleted file mode 100644 index 73f37c4180..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_metadata.json +++ /dev/null @@ -1,373 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.firestore_admin_v1", - "protoPackage": "google.firestore.admin.v1", - "schema": "1.0", - "services": { - "FirestoreAdmin": { - "clients": { - "grpc": { - "libraryClient": "FirestoreAdminClient", - "rpcs": { - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - 
"methods": [ - "list_indexes" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - }, - "grpc-async": { - "libraryClient": "FirestoreAdminAsyncClient", - "rpcs": { - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - }, - "rest": { - "libraryClient": "FirestoreAdminClient", - "rpcs": { - "CreateBackupSchedule": { - 
"methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py deleted file mode 100644 index 558c8aab67..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed deleted file mode 100644 index f7a4796eee..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore-admin package uses inline types. diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py deleted file mode 100644 index 8f6cf06824..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py deleted file mode 100644 index 7d14cb399e..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import FirestoreAdminClient -from .async_client import FirestoreAdminAsyncClient - -__all__ = ( - 'FirestoreAdminClient', - 'FirestoreAdminAsyncClient', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py deleted file mode 100644 index 11f8a357df..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ /dev/null @@ -1,3202 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation as gac_operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import 
operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .client import FirestoreAdminClient - - -class FirestoreAdminAsyncClient: - """The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. 
- """ - - _client: FirestoreAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = FirestoreAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = FirestoreAdminClient._DEFAULT_UNIVERSE - - backup_path = staticmethod(FirestoreAdminClient.backup_path) - parse_backup_path = staticmethod(FirestoreAdminClient.parse_backup_path) - backup_schedule_path = staticmethod(FirestoreAdminClient.backup_schedule_path) - parse_backup_schedule_path = staticmethod(FirestoreAdminClient.parse_backup_schedule_path) - collection_group_path = staticmethod(FirestoreAdminClient.collection_group_path) - parse_collection_group_path = staticmethod(FirestoreAdminClient.parse_collection_group_path) - database_path = staticmethod(FirestoreAdminClient.database_path) - parse_database_path = staticmethod(FirestoreAdminClient.parse_database_path) - field_path = staticmethod(FirestoreAdminClient.field_path) - parse_field_path = staticmethod(FirestoreAdminClient.parse_field_path) - index_path = staticmethod(FirestoreAdminClient.index_path) - parse_index_path = staticmethod(FirestoreAdminClient.parse_index_path) - location_path = staticmethod(FirestoreAdminClient.location_path) - parse_location_path = staticmethod(FirestoreAdminClient.parse_location_path) - common_billing_account_path = staticmethod(FirestoreAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(FirestoreAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(FirestoreAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(FirestoreAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(FirestoreAdminClient.common_organization_path) - parse_common_organization_path = 
staticmethod(FirestoreAdminClient.parse_common_organization_path) - common_project_path = staticmethod(FirestoreAdminClient.common_project_path) - parse_common_project_path = staticmethod(FirestoreAdminClient.parse_common_project_path) - common_location_path = staticmethod(FirestoreAdminClient.common_location_path) - parse_common_location_path = staticmethod(FirestoreAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminAsyncClient: The constructed client. - """ - return FirestoreAdminClient.from_service_account_info.__func__(FirestoreAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminAsyncClient: The constructed client. - """ - return FirestoreAdminClient.from_service_account_file.__func__(FirestoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return FirestoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> FirestoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. 
- """ - return self._client._universe_domain - - get_transport_class = functools.partial(type(FirestoreAdminClient).get_transport_class, type(FirestoreAdminClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, FirestoreAdminTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.FirestoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = FirestoreAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_index(self, - request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, - *, - parent: Optional[str] = None, - index: Optional[gfa_index.Index] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - index (:class:`google.cloud.firestore_admin_v1.types.Index`): - Required. The composite index to - create. - - This corresponds to the ``index`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against - documents in a database. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, index]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.CreateIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if index is not None: - request.index = index - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_index.Index, - metadata_type=gfa_operation.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_indexes(self, - request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListIndexesAsyncPager: - r"""Lists composite indexes. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ListIndexesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_indexes, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListIndexesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_index(self, - request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> index.Index: - r"""Gets a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = await client.get_index(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.firestore_admin_v1.types.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.GetIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_index, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_index(self, - request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - await client.delete_index(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.DeleteIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_index, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_field(self, - request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> field.Field: - r"""Gets the metadata and configuration for a Field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = await client.get_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same id. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.GetFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_field, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_field(self, - request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, - *, - field: Optional[gfa_field.Field] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a field configuration. Currently, field updates apply - only to single field index configuration. 
However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. 
- field (:class:`google.cloud.firestore_admin_v1.types.Field`): - Required. The field to be updated. - This corresponds to the ``field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Field` - Represents a single field in the database. - - Fields are grouped by their "Collection Group", which - represent all collections in the database with the - same id. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([field]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.UpdateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if field is not None: - request.field = field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_field, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("field.name", request.field.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_field.Field, - metadata_type=gfa_operation.FieldOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_fields(self, - request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListFieldsAsyncPager: - r"""Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ListFieldsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_fields, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListFieldsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def export_documents(self, - request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - name (:class:`str`): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] - response field. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ExportDocumentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_documents, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_operation.ExportDocumentsResponse, - metadata_type=gfa_operation.ExportDocumentsMetadata, - ) - - # Done; return the response. - return response - - async def import_documents(self, - request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. 
If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - name (:class:`str`): - Required. Database to import into. Should be of the - form: ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ImportDocumentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_documents, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=gfa_operation.ImportDocumentsMetadata, - ) - - # Done; return the response. 
- return response - - async def create_database(self, - request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[gfa_database.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (:class:`google.cloud.firestore_admin_v1.types.Database`): - Required. The Database to create. 
- This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The ID to use for the database, which will - become the final component of the database's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database id is also valid. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_database, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> database.Database: - r"""Gets information about a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Database: - A Cloud Firestore Database. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_database, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_databases(self, - request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListDatabasesResponse: - r"""List all the databases in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_databases(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]]): - The request object. A request to list the Firestore - Databases in all locations for a - project. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListDatabasesResponse: - The list of databases for a project. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_databases, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_database(self, - request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[gfa_database.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - database (:class:`google.cloud.firestore_admin_v1.types.Database`): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_database, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def delete_database(self, - request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a database. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.DeleteDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_database, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - database.Database, - metadata_type=firestore_admin.DeleteDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def get_backup(self, - request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> backup.Backup: - r"""Gets information about a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - name (:class:`str`): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_backup, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backups(self, - request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListBackupsResponse: - r"""Lists all the backups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backups(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - parent (:class:`str`): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. - Use ``{location} = '-'`` to list backups from all - locations for the given project. This allows listing - backups from a single location or from all locations. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ListBackupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_backups, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup(self, - request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - name (:class:`str`): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_backup, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def restore_database(self, - request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]]): - The request object. The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - request = firestore_admin.RestoreDatabaseRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.restore_database, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - database.Database, - metadata_type=gfa_operation.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def create_backup_schedule(self, - request: Optional[Union[firestore_admin.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[schedule.BackupSchedule] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - parent (:class:`str`): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_schedule]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.CreateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_backup_schedule, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_backup_schedule(self, - request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Gets information about a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - name (:class:`str`): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.GetBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_backup_schedule, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_schedules(self, - request: Optional[Union[firestore_admin.ListBackupSchedulesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""List backup schedules. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backup_schedules(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - parent (:class:`str`): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.ListBackupSchedulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_backup_schedules, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_backup_schedule(self, - request: Optional[Union[firestore_admin.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`): - Required. The backup schedule to - update. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.UpdateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_backup_schedule, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_schedule(self, - request: Optional[Union[firestore_admin.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup_schedule(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. - name (:class:`str`): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = firestore_admin.DeleteBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_backup_schedule, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "FirestoreAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FirestoreAdminAsyncClient", -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py deleted file mode 100644 index 7ce70b29bc..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ /dev/null @@ -1,3570 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api_core import operation as gac_operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from 
google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import FirestoreAdminGrpcTransport -from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .transports.rest import FirestoreAdminRestTransport - - -class FirestoreAdminClientMeta(type): - """Metaclass for the FirestoreAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]] - _transport_registry["grpc"] = FirestoreAdminGrpcTransport - _transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport - _transport_registry["rest"] = FirestoreAdminRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[FirestoreAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class FirestoreAdminClient(metaclass=FirestoreAdminClientMeta): - """The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. 
- - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = "firestore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> FirestoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreAdminTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def backup_path(project: str,location: str,backup: str,) -> str: - """Returns a fully-qualified backup string.""" - return "projects/{project}/locations/{location}/backups/{backup}".format(project=project, location=location, backup=backup, ) - - @staticmethod - def parse_backup_path(path: str) -> Dict[str,str]: - """Parses a backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def backup_schedule_path(project: str,database: str,backup_schedule: str,) -> str: - """Returns a fully-qualified backup_schedule string.""" - return "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format(project=project, database=database, backup_schedule=backup_schedule, ) - - @staticmethod - def parse_backup_schedule_path(path: str) -> Dict[str,str]: - """Parses a backup_schedule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/backupSchedules/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def collection_group_path(project: str,database: str,collection: str,) -> str: - """Returns a fully-qualified collection_group string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}".format(project=project, database=database, collection=collection, ) - - @staticmethod - def parse_collection_group_path(path: str) -> Dict[str,str]: - """Parses a collection_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_path(project: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/databases/{database}".format(project=project, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> 
Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def field_path(project: str,database: str,collection: str,field: str,) -> str: - """Returns a fully-qualified field string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(project=project, database=database, collection=collection, field=field, ) - - @staticmethod - def parse_field_path(path: str) -> Dict[str,str]: - """Parses a field path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/fields/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def index_path(project: str,database: str,collection: str,index: str,) -> str: - """Returns a fully-qualified index string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(project=project, database=database, collection=collection, index=index, ) - - @staticmethod - def parse_index_path(path: str) -> Dict[str,str]: - """Parses a index path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/indexes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def location_path(project: str,location: str,) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_location_path(path: str) -> Dict[str,str]: - """Parses a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return 
"billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} 
- - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = FirestoreAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - FirestoreAdminClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, FirestoreAdminTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, FirestoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = FirestoreAdminClient._read_environment_variables() - self._client_cert_source = FirestoreAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = FirestoreAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, FirestoreAdminTransport) - if transport_provided: - # transport is a FirestoreAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(FirestoreAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - FirestoreAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_index(self, - request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, - *, - parent: Optional[str] = None, - index: Optional[gfa_index.Index] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - index (google.cloud.firestore_admin_v1.types.Index): - Required. The composite index to - create. - - This corresponds to the ``index`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against - documents in a database. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, index]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.CreateIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.CreateIndexRequest): - request = firestore_admin.CreateIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if index is not None: - request.index = index - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_index.Index, - metadata_type=gfa_operation.IndexOperationMetadata, - ) - - # Done; return the response. 
- return response - - def list_indexes(self, - request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListIndexesPager: - r"""Lists composite indexes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ListIndexesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ListIndexesRequest): - request = firestore_admin.ListIndexesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_indexes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListIndexesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_index(self, - request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> index.Index: - r"""Gets a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = client.get_index(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.firestore_admin_v1.types.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.GetIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.GetIndexRequest): - request = firestore_admin.GetIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_index(self, - request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a composite index. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - client.delete_index(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.DeleteIndexRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.DeleteIndexRequest): - request = firestore_admin.DeleteIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_field(self, - request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> field.Field: - r"""Gets the metadata and configuration for a Field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = client.get_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same id. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.GetFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.GetFieldRequest): - request = firestore_admin.GetFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_field(self, - request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, - *, - field: Optional[gfa_field.Field] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. 
The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - field (google.cloud.firestore_admin_v1.types.Field): - Required. The field to be updated. - This corresponds to the ``field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Field` - Represents a single field in the database. - - Fields are grouped by their "Collection Group", which - represent all collections in the database with the - same id. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([field]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.UpdateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.UpdateFieldRequest): - request = firestore_admin.UpdateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if field is not None: - request.field = field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("field.name", request.field.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_field.Field, - metadata_type=gfa_operation.FieldOperationMetadata, - ) - - # Done; return the response. - return response - - def list_fields(self, - request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListFieldsPager: - r"""Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ListFieldsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ListFieldsRequest): - request = firestore_admin.ListFieldsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_fields] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListFieldsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def export_documents(self, - request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. 
Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - name (str): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] - response field. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ExportDocumentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ExportDocumentsRequest): - request = firestore_admin.ExportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_operation.ExportDocumentsResponse, - metadata_type=gfa_operation.ExportDocumentsMetadata, - ) - - # Done; return the response. - return response - - def import_documents(self, - request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - name (str): - Required. Database to import into. Should be of the - form: ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ImportDocumentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ImportDocumentsRequest): - request = firestore_admin.ImportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=gfa_operation.ImportDocumentsMetadata, - ) - - # Done; return the response. 
- return response - - def create_database(self, - request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[gfa_database.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Create a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (google.cloud.firestore_admin_v1.types.Database): - Required. The Database to create. 
- This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The ID to use for the database, which will - become the final component of the database's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database id is also valid. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.CreateDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, firestore_admin.CreateDatabaseRequest): - request = firestore_admin.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - def get_database(self, - request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> database.Database: - r"""Gets information about a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Database: - A Cloud Firestore Database. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.GetDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, firestore_admin.GetDatabaseRequest): - request = firestore_admin.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_databases(self, - request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListDatabasesResponse: - r"""List all the databases in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_databases(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]): - The request object. A request to list the Firestore - Databases in all locations for a - project. - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListDatabasesResponse: - The list of databases for a project. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ListDatabasesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, firestore_admin.ListDatabasesRequest): - request = firestore_admin.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database(self, - request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[gfa_database.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Updates a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - database (google.cloud.firestore_admin_v1.types.Database): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.UpdateDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.UpdateDatabaseRequest): - request = firestore_admin.UpdateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. - return response - - def delete_database(self, - request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Deletes a database. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.DeleteDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.DeleteDatabaseRequest): - request = firestore_admin.DeleteDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - database.Database, - metadata_type=firestore_admin.DeleteDatabaseMetadata, - ) - - # Done; return the response. - return response - - def get_backup(self, - request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> backup.Backup: - r"""Gets information about a backup. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - name (str): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.GetBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.GetBackupRequest): - request = firestore_admin.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backups(self, - request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListBackupsResponse: - r"""Lists all the backups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backups(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - parent (str): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. - Use ``{location} = '-'`` to list backups from all - locations for the given project. This allows listing - backups from a single location or from all locations. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ListBackupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ListBackupsRequest): - request = firestore_admin.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup(self, - request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - client.delete_backup(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - name (str): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.DeleteBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, firestore_admin.DeleteBackupRequest): - request = firestore_admin.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def restore_database(self, - request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gac_operation.Operation: - r"""Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. 
The new database is not readable or - writeable until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]): - The request object. The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.RestoreDatabaseRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.RestoreDatabaseRequest): - request = firestore_admin.RestoreDatabaseRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - database.Database, - metadata_type=gfa_operation.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - def create_backup_schedule(self, - request: Optional[Union[firestore_admin.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[schedule.BackupSchedule] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - parent (str): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_schedule]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.CreateBackupScheduleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.CreateBackupScheduleRequest): - request = firestore_admin.CreateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_backup_schedule(self, - request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Gets information about a backup schedule. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.GetBackupScheduleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.GetBackupScheduleRequest): - request = firestore_admin.GetBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backup_schedules(self, - request: Optional[Union[firestore_admin.ListBackupSchedulesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""List backup schedules. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backup_schedules(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - parent (str): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.ListBackupSchedulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.ListBackupSchedulesRequest): - request = firestore_admin.ListBackupSchedulesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup_schedule(self, - request: Optional[Union[firestore_admin.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to - update. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.UpdateBackupScheduleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.UpdateBackupScheduleRequest): - request = firestore_admin.UpdateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup_schedule(self, - request: Optional[Union[firestore_admin.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - client.delete_backup_schedule(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a firestore_admin.DeleteBackupScheduleRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, firestore_admin.DeleteBackupScheduleRequest): - request = firestore_admin.DeleteBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "FirestoreAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FirestoreAdminClient", -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py deleted file mode 100644 index 65a4663eee..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index - - -class ListIndexesPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``indexes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., firestore_admin.ListIndexesResponse], - request: firestore_admin.ListIndexesRequest, - response: firestore_admin.ListIndexesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = firestore_admin.ListIndexesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[index.Index]: - for page in self.pages: - yield from page.indexes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListIndexesAsyncPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``indexes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[firestore_admin.ListIndexesResponse]], - request: firestore_admin.ListIndexesRequest, - response: firestore_admin.ListIndexesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): - The initial request object. 
- response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore_admin.ListIndexesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[index.Index]: - async def async_generator(): - async for page in self.pages: - for response in page.indexes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListFieldsPager: - """A pager for iterating through ``list_fields`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``fields`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListFields`` requests and continue to iterate - through the ``fields`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., firestore_admin.ListFieldsResponse], - request: firestore_admin.ListFieldsRequest, - response: firestore_admin.ListFieldsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore_admin.ListFieldsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore_admin.ListFieldsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[field.Field]: - for page in self.pages: - yield from page.fields - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListFieldsAsyncPager: - """A pager for iterating through ``list_fields`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``fields`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListFields`` requests and continue to iterate - through the ``fields`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[firestore_admin.ListFieldsResponse]], - request: firestore_admin.ListFieldsRequest, - response: firestore_admin.ListFieldsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore_admin.ListFieldsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore_admin.ListFieldsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[field.Field]: - async def async_generator(): - async for page in self.pages: - for response in page.fields: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py 
b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py deleted file mode 100644 index e3727c9b57..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import FirestoreAdminTransport -from .grpc import FirestoreAdminGrpcTransport -from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .rest import FirestoreAdminRestTransport -from .rest import FirestoreAdminRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]] -_transport_registry['grpc'] = FirestoreAdminGrpcTransport -_transport_registry['grpc_asyncio'] = FirestoreAdminGrpcAsyncIOTransport -_transport_registry['rest'] = FirestoreAdminRestTransport - -__all__ = ( - 'FirestoreAdminTransport', - 'FirestoreAdminGrpcTransport', - 'FirestoreAdminGrpcAsyncIOTransport', - 'FirestoreAdminRestTransport', - 'FirestoreAdminRestInterceptor', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py deleted file mode 100644 index ae11033e03..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ /dev/null @@ -1,551 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class FirestoreAdminTransport(abc.ABC): - """Abstract transport class for FirestoreAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', - ) - - DEFAULT_HOST: str = 'firestore.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_index: gapic_v1.method.wrap_method( - self.create_index, - default_timeout=60.0, - client_info=client_info, - ), - self.list_indexes: gapic_v1.method.wrap_method( - self.list_indexes, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_index: gapic_v1.method.wrap_method( - self.get_index, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - 
deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_field: gapic_v1.method.wrap_method( - self.get_field, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_field: gapic_v1.method.wrap_method( - self.update_field, - default_timeout=60.0, - client_info=client_info, - ), - self.list_fields: gapic_v1.method.wrap_method( - self.list_fields, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.export_documents: gapic_v1.method.wrap_method( - self.export_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.import_documents: gapic_v1.method.wrap_method( - self.import_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=None, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: gapic_v1.method.wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_timeout=None, - client_info=client_info, - ), 
- self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_timeout=None, - client_info=client_info, - ), - self.restore_database: gapic_v1.method.wrap_method( - self.restore_database, - default_timeout=None, - client_info=client_info, - ), - self.create_backup_schedule: gapic_v1.method.wrap_method( - self.create_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_schedule: gapic_v1.method.wrap_method( - self.get_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_schedules: gapic_v1.method.wrap_method( - self.list_backup_schedules, - default_timeout=None, - client_info=client_info, - ), - self.update_backup_schedule: gapic_v1.method.wrap_method( - self.update_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_schedule: gapic_v1.method.wrap_method( - self.delete_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_index(self) -> Callable[ - [firestore_admin.CreateIndexRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_indexes(self) -> Callable[ - [firestore_admin.ListIndexesRequest], - Union[ - firestore_admin.ListIndexesResponse, - Awaitable[firestore_admin.ListIndexesResponse] - ]]: - raise NotImplementedError() - - @property - def get_index(self) -> Callable[ - [firestore_admin.GetIndexRequest], - Union[ - index.Index, - Awaitable[index.Index] - ]]: - raise NotImplementedError() - - @property - def delete_index(self) -> Callable[ - [firestore_admin.DeleteIndexRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_field(self) -> Callable[ - [firestore_admin.GetFieldRequest], - Union[ - field.Field, - Awaitable[field.Field] - ]]: - raise NotImplementedError() - - @property - def update_field(self) -> Callable[ - [firestore_admin.UpdateFieldRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_fields(self) -> Callable[ - [firestore_admin.ListFieldsRequest], - Union[ - firestore_admin.ListFieldsResponse, - Awaitable[firestore_admin.ListFieldsResponse] - ]]: - raise NotImplementedError() - - @property - def export_documents(self) -> Callable[ - [firestore_admin.ExportDocumentsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def import_documents(self) -> Callable[ - [firestore_admin.ImportDocumentsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - 
def create_database(self) -> Callable[ - [firestore_admin.CreateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [firestore_admin.GetDatabaseRequest], - Union[ - database.Database, - Awaitable[database.Database] - ]]: - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [firestore_admin.ListDatabasesRequest], - Union[ - firestore_admin.ListDatabasesResponse, - Awaitable[firestore_admin.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def update_database(self) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_database(self) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup(self) -> Callable[ - [firestore_admin.GetBackupRequest], - Union[ - backup.Backup, - Awaitable[backup.Backup] - ]]: - raise NotImplementedError() - - @property - def list_backups(self) -> Callable[ - [firestore_admin.ListBackupsRequest], - Union[ - firestore_admin.ListBackupsResponse, - Awaitable[firestore_admin.ListBackupsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_backup(self) -> Callable[ - [firestore_admin.DeleteBackupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def restore_database(self) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_backup_schedule(self) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - Union[ - schedule.BackupSchedule, - 
Awaitable[schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def get_backup_schedule(self) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], - Union[ - schedule.BackupSchedule, - Awaitable[schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def list_backup_schedules(self) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - Union[ - firestore_admin.ListBackupSchedulesResponse, - Awaitable[firestore_admin.ListBackupSchedulesResponse] - ]]: - raise NotImplementedError() - - @property - def update_backup_schedule(self) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - Union[ - schedule.BackupSchedule, - Awaitable[schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def delete_backup_schedule(self) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'FirestoreAdminTransport', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py 
b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py deleted file mode 100644 index 284a6cfe16..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ /dev/null @@ -1,1032 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO - - -class 
FirestoreAdminGrpcTransport(FirestoreAdminTransport): - """gRPC backend transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. 
- return self._operations_client - - @property - def create_index(self) -> Callable[ - [firestore_admin.CreateIndexRequest], - operations_pb2.Operation]: - r"""Return a callable for the create index method over gRPC. - - Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - Returns: - Callable[[~.CreateIndexRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_index' not in self._stubs: - self._stubs['create_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateIndex', - request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_index'] - - @property - def list_indexes(self) -> Callable[ - [firestore_admin.ListIndexesRequest], - firestore_admin.ListIndexesResponse]: - r"""Return a callable for the list indexes method over gRPC. - - Lists composite indexes. - - Returns: - Callable[[~.ListIndexesRequest], - ~.ListIndexesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_indexes' not in self._stubs: - self._stubs['list_indexes'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListIndexes', - request_serializer=firestore_admin.ListIndexesRequest.serialize, - response_deserializer=firestore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs['list_indexes'] - - @property - def get_index(self) -> Callable[ - [firestore_admin.GetIndexRequest], - index.Index]: - r"""Return a callable for the get index method over gRPC. - - Gets a composite index. - - Returns: - Callable[[~.GetIndexRequest], - ~.Index]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_index' not in self._stubs: - self._stubs['get_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetIndex', - request_serializer=firestore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs['get_index'] - - @property - def delete_index(self) -> Callable[ - [firestore_admin.DeleteIndexRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete index method over gRPC. - - Deletes a composite index. - - Returns: - Callable[[~.DeleteIndexRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_index' not in self._stubs: - self._stubs['delete_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex', - request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_index'] - - @property - def get_field(self) -> Callable[ - [firestore_admin.GetFieldRequest], - field.Field]: - r"""Return a callable for the get field method over gRPC. - - Gets the metadata and configuration for a Field. - - Returns: - Callable[[~.GetFieldRequest], - ~.Field]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_field' not in self._stubs: - self._stubs['get_field'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetField', - request_serializer=firestore_admin.GetFieldRequest.serialize, - response_deserializer=field.Field.deserialize, - ) - return self._stubs['get_field'] - - @property - def update_field(self) -> Callable[ - [firestore_admin.UpdateFieldRequest], - operations_pb2.Operation]: - r"""Return a callable for the update field method over gRPC. - - Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. 
The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - Returns: - Callable[[~.UpdateFieldRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_field' not in self._stubs: - self._stubs['update_field'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateField', - request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_field'] - - @property - def list_fields(self) -> Callable[ - [firestore_admin.ListFieldsRequest], - firestore_admin.ListFieldsResponse]: - r"""Return a callable for the list fields method over gRPC. - - Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - Returns: - Callable[[~.ListFieldsRequest], - ~.ListFieldsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_fields' not in self._stubs: - self._stubs['list_fields'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListFields', - request_serializer=firestore_admin.ListFieldsRequest.serialize, - response_deserializer=firestore_admin.ListFieldsResponse.deserialize, - ) - return self._stubs['list_fields'] - - @property - def export_documents(self) -> Callable[ - [firestore_admin.ExportDocumentsRequest], - operations_pb2.Operation]: - r"""Return a callable for the export documents method over gRPC. - - Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - Returns: - Callable[[~.ExportDocumentsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'export_documents' not in self._stubs: - self._stubs['export_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments', - request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_documents'] - - @property - def import_documents(self) -> Callable[ - [firestore_admin.ImportDocumentsRequest], - operations_pb2.Operation]: - r"""Return a callable for the import documents method over gRPC. - - Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - Returns: - Callable[[~.ImportDocumentsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_documents' not in self._stubs: - self._stubs['import_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments', - request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_documents'] - - @property - def create_database(self) -> Callable[ - [firestore_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the create database method over gRPC. - - Create a database. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase', - request_serializer=firestore_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [firestore_admin.GetDatabaseRequest], - database.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets information about a database. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetDatabase', - request_serializer=firestore_admin.GetDatabaseRequest.serialize, - response_deserializer=database.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [firestore_admin.ListDatabasesRequest], - firestore_admin.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - List all the databases in the project. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListDatabases', - request_serializer=firestore_admin.ListDatabasesRequest.serialize, - response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def update_database(self) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database method over gRPC. - - Updates a database. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase', - request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def delete_database(self) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete database method over gRPC. - - Deletes a database. - - Returns: - Callable[[~.DeleteDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase', - request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_database'] - - @property - def get_backup(self) -> Callable[ - [firestore_admin.GetBackupRequest], - backup.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets information about a backup. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetBackup', - request_serializer=firestore_admin.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def list_backups(self) -> Callable[ - [firestore_admin.ListBackupsRequest], - firestore_admin.ListBackupsResponse]: - r"""Return a callable for the list backups method over gRPC. - - Lists all the backups. - - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListBackups', - request_serializer=firestore_admin.ListBackupsRequest.serialize, - response_deserializer=firestore_admin.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def delete_backup(self) -> Callable[ - [firestore_admin.DeleteBackupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a backup. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup', - request_serializer=firestore_admin.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def restore_database(self) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore database method over gRPC. - - Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. 
- - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - Returns: - Callable[[~.RestoreDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase', - request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def create_backup_schedule(self) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - schedule.BackupSchedule]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule', - request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], - schedule.BackupSchedule]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets information about a backup schedule. - - Returns: - Callable[[~.GetBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule', - request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - firestore_admin.ListBackupSchedulesResponse]: - r"""Return a callable for the list backup schedules method over gRPC. - - List backup schedules. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - ~.ListBackupSchedulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules', - request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, - response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def update_backup_schedule(self) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - schedule.BackupSchedule]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule', - request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule', - request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'FirestoreAdminGrpcTransport', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py deleted file mode 100644 index edfc17104c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1031 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not 
use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import FirestoreAdminGrpcTransport - - -class FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport): - """gRPC AsyncIO backend transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. 
- - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_index(self) -> Callable[ - [firestore_admin.CreateIndexRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create index method over gRPC. - - Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - Returns: - Callable[[~.CreateIndexRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_index' not in self._stubs: - self._stubs['create_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateIndex', - request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_index'] - - @property - def list_indexes(self) -> Callable[ - [firestore_admin.ListIndexesRequest], - Awaitable[firestore_admin.ListIndexesResponse]]: - r"""Return a callable for the list indexes method over gRPC. - - Lists composite indexes. - - Returns: - Callable[[~.ListIndexesRequest], - Awaitable[~.ListIndexesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_indexes' not in self._stubs: - self._stubs['list_indexes'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListIndexes', - request_serializer=firestore_admin.ListIndexesRequest.serialize, - response_deserializer=firestore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs['list_indexes'] - - @property - def get_index(self) -> Callable[ - [firestore_admin.GetIndexRequest], - Awaitable[index.Index]]: - r"""Return a callable for the get index method over gRPC. - - Gets a composite index. - - Returns: - Callable[[~.GetIndexRequest], - Awaitable[~.Index]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_index' not in self._stubs: - self._stubs['get_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetIndex', - request_serializer=firestore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs['get_index'] - - @property - def delete_index(self) -> Callable[ - [firestore_admin.DeleteIndexRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete index method over gRPC. - - Deletes a composite index. - - Returns: - Callable[[~.DeleteIndexRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_index' not in self._stubs: - self._stubs['delete_index'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex', - request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_index'] - - @property - def get_field(self) -> Callable[ - [firestore_admin.GetFieldRequest], - Awaitable[field.Field]]: - r"""Return a callable for the get field method over gRPC. - - Gets the metadata and configuration for a Field. - - Returns: - Callable[[~.GetFieldRequest], - Awaitable[~.Field]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_field' not in self._stubs: - self._stubs['get_field'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetField', - request_serializer=firestore_admin.GetFieldRequest.serialize, - response_deserializer=field.Field.deserialize, - ) - return self._stubs['get_field'] - - @property - def update_field(self) -> Callable[ - [firestore_admin.UpdateFieldRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update field method over gRPC. - - Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. 
The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - Returns: - Callable[[~.UpdateFieldRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_field' not in self._stubs: - self._stubs['update_field'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateField', - request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_field'] - - @property - def list_fields(self) -> Callable[ - [firestore_admin.ListFieldsRequest], - Awaitable[firestore_admin.ListFieldsResponse]]: - r"""Return a callable for the list fields method over gRPC. - - Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - Returns: - Callable[[~.ListFieldsRequest], - Awaitable[~.ListFieldsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_fields' not in self._stubs: - self._stubs['list_fields'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListFields', - request_serializer=firestore_admin.ListFieldsRequest.serialize, - response_deserializer=firestore_admin.ListFieldsResponse.deserialize, - ) - return self._stubs['list_fields'] - - @property - def export_documents(self) -> Callable[ - [firestore_admin.ExportDocumentsRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the export documents method over gRPC. - - Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - Returns: - Callable[[~.ExportDocumentsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'export_documents' not in self._stubs: - self._stubs['export_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments', - request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_documents'] - - @property - def import_documents(self) -> Callable[ - [firestore_admin.ImportDocumentsRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the import documents method over gRPC. - - Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - Returns: - Callable[[~.ImportDocumentsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_documents' not in self._stubs: - self._stubs['import_documents'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments', - request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_documents'] - - @property - def create_database(self) -> Callable[ - [firestore_admin.CreateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create database method over gRPC. - - Create a database. 
- - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase', - request_serializer=firestore_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [firestore_admin.GetDatabaseRequest], - Awaitable[database.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets information about a database. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetDatabase', - request_serializer=firestore_admin.GetDatabaseRequest.serialize, - response_deserializer=database.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [firestore_admin.ListDatabasesRequest], - Awaitable[firestore_admin.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - List all the databases in the project. 
- - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListDatabases', - request_serializer=firestore_admin.ListDatabasesRequest.serialize, - response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def update_database(self) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database method over gRPC. - - Updates a database. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase', - request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def delete_database(self) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete database method over gRPC. - - Deletes a database. 
- - Returns: - Callable[[~.DeleteDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase', - request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_database'] - - @property - def get_backup(self) -> Callable[ - [firestore_admin.GetBackupRequest], - Awaitable[backup.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets information about a backup. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetBackup', - request_serializer=firestore_admin.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def list_backups(self) -> Callable[ - [firestore_admin.ListBackupsRequest], - Awaitable[firestore_admin.ListBackupsResponse]]: - r"""Return a callable for the list backups method over gRPC. - - Lists all the backups. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListBackups', - request_serializer=firestore_admin.ListBackupsRequest.serialize, - response_deserializer=firestore_admin.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def delete_backup(self) -> Callable[ - [firestore_admin.DeleteBackupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a backup. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup', - request_serializer=firestore_admin.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def restore_database(self) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore database method over gRPC. - - Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. 
This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - Returns: - Callable[[~.RestoreDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase', - request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def create_backup_schedule(self) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - Awaitable[schedule.BackupSchedule]]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. 
- - Returns: - Callable[[~.CreateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule', - request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], - Awaitable[schedule.BackupSchedule]]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets information about a backup schedule. - - Returns: - Callable[[~.GetBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule', - request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - Awaitable[firestore_admin.ListBackupSchedulesResponse]]: - r"""Return a callable for the list backup schedules method over gRPC. - - List backup schedules. 
- - Returns: - Callable[[~.ListBackupSchedulesRequest], - Awaitable[~.ListBackupSchedulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules', - request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, - response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def update_backup_schedule(self) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - Awaitable[schedule.BackupSchedule]]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule', - request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup schedule method over gRPC. 
- - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self.grpc_channel.unary_unary( - '/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule', - request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'FirestoreAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py deleted file mode 100644 index bc341d7348..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ /dev/null @@ -1,3178 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class FirestoreAdminRestInterceptor: - """Interceptor for FirestoreAdmin. 
- - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the FirestoreAdminRestTransport. - - .. code-block:: python - class MyCustomFirestoreAdminInterceptor(FirestoreAdminRestInterceptor): - def pre_create_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_export_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def 
post_export_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_field(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_field(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_import_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_import_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backups(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_schedules(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_schedules(self, response): - logging.log(f"Received response: {response}") - return response - - 
def pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_fields(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_fields(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_indexes(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_indexes(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restore_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restore_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_field(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_field(self, response): - logging.log(f"Received response: {response}") - return response - - transport = FirestoreAdminRestTransport(interceptor=MyCustomFirestoreAdminInterceptor()) - client = FirestoreAdminClient(transport=transport) - - - """ - def pre_create_backup_schedule(self, request: firestore_admin.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[firestore_admin.CreateBackupScheduleRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: - """Post-rpc interceptor for create_backup_schedule - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_create_database(self, request: firestore_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.CreateDatabaseRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_database - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_create_index(self, request: firestore_admin.CreateIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_index(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_index - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
- """ - return response - def pre_delete_backup(self, request: firestore_admin.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteBackupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def pre_delete_backup_schedule(self, request: firestore_admin.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteBackupScheduleRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def pre_delete_database(self, request: firestore_admin.DeleteDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteDatabaseRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_delete_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_database - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_delete_index(self, request: firestore_admin.DeleteIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def pre_export_documents(self, request: firestore_admin.ExportDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ExportDocumentsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for export_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_export_documents(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_documents - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_backup(self, request: firestore_admin.GetBackupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetBackupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_backup(self, response: backup.Backup) -> backup.Backup: - """Post-rpc interceptor for get_backup - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_backup_schedule(self, request: firestore_admin.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetBackupScheduleRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def post_get_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: - """Post-rpc interceptor for get_backup_schedule - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_database(self, request: firestore_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetDatabaseRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_database(self, response: database.Database) -> database.Database: - """Post-rpc interceptor for get_database - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_field(self, request: firestore_admin.GetFieldRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetFieldRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_field - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_field(self, response: field.Field) -> field.Field: - """Post-rpc interceptor for get_field - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
- """ - return response - def pre_get_index(self, request: firestore_admin.GetIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_index(self, response: index.Index) -> index.Index: - """Post-rpc interceptor for get_index - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_import_documents(self, request: firestore_admin.ImportDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ImportDocumentsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for import_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_import_documents(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for import_documents - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_backups(self, request: firestore_admin.ListBackupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListBackupsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_backups - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def post_list_backups(self, response: firestore_admin.ListBackupsResponse) -> firestore_admin.ListBackupsResponse: - """Post-rpc interceptor for list_backups - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_backup_schedules(self, request: firestore_admin.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListBackupSchedulesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_backup_schedules - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_backup_schedules(self, response: firestore_admin.ListBackupSchedulesResponse) -> firestore_admin.ListBackupSchedulesResponse: - """Post-rpc interceptor for list_backup_schedules - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_databases(self, request: firestore_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListDatabasesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_databases(self, response: firestore_admin.ListDatabasesResponse) -> firestore_admin.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
- """ - return response - def pre_list_fields(self, request: firestore_admin.ListFieldsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListFieldsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_fields - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_fields(self, response: firestore_admin.ListFieldsResponse) -> firestore_admin.ListFieldsResponse: - """Post-rpc interceptor for list_fields - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_indexes(self, request: firestore_admin.ListIndexesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_indexes - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_indexes(self, response: firestore_admin.ListIndexesResponse) -> firestore_admin.ListIndexesResponse: - """Post-rpc interceptor for list_indexes - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_restore_database(self, request: firestore_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.RestoreDatabaseRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for restore_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_database - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_update_backup_schedule(self, request: firestore_admin.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateBackupScheduleRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_backup_schedule(self, response: schedule.BackupSchedule) -> schedule.BackupSchedule: - """Post-rpc interceptor for update_backup_schedule - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_update_database(self, request: firestore_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateDatabaseRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
- """ - return response - def pre_update_field(self, request: firestore_admin.UpdateFieldRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[firestore_admin.UpdateFieldRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_field - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_field(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_field - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
- """ - return response - - -@dataclasses.dataclass -class FirestoreAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: FirestoreAdminRestInterceptor - - -class FirestoreAdminRestTransport(FirestoreAdminTransport): - """REST backend transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'firestore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[FirestoreAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or FirestoreAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateBackupSchedule(FirestoreAdminRestStub): - def __hash__(self): - return hash("CreateBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.CreateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> schedule.BackupSchedule: - r"""Call the create backup schedule method over HTTP. - - Args: - request (~.firestore_admin.CreateBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*}/backupSchedules', - 'body': 'backup_schedule', - }, - ] - request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata) - pb_request = firestore_admin.CreateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_backup_schedule(resp) - return resp - - class _CreateDatabase(FirestoreAdminRestStub): - def __hash__(self): - return hash("CreateDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "databaseId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.CreateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create database method over HTTP. - - Args: - request (~.firestore_admin.CreateDatabaseRequest): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/databases', - 'body': 'database', - }, - ] - request, metadata = self._interceptor.pre_create_database(request, metadata) - pb_request = firestore_admin.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_database(resp) - return resp - - class _CreateIndex(FirestoreAdminRestStub): - def __hash__(self): - return hash("CreateIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.CreateIndexRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create index method over HTTP. - - Args: - request (~.firestore_admin.CreateIndexRequest): - The request object. The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes', - 'body': 'index', - }, - ] - request, metadata = self._interceptor.pre_create_index(request, metadata) - pb_request = firestore_admin.CreateIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_index(resp) - return resp - - class _DeleteBackup(FirestoreAdminRestStub): - def __hash__(self): - return hash("DeleteBackup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.DeleteBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete backup method over HTTP. - - Args: - request (~.firestore_admin.DeleteBackupRequest): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/backups/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - pb_request = firestore_admin.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteBackupSchedule(FirestoreAdminRestStub): - def __hash__(self): - return hash("DeleteBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.DeleteBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete backup schedule method over HTTP. - - Args: - request (~.firestore_admin.DeleteBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/backupSchedules/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata) - pb_request = firestore_admin.DeleteBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDatabase(FirestoreAdminRestStub): - def __hash__(self): - return hash("DeleteDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.DeleteDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete database method over HTTP. - - Args: - request (~.firestore_admin.DeleteDatabaseRequest): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_database(request, metadata) - pb_request = firestore_admin.DeleteDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_database(resp) - return resp - - class _DeleteIndex(FirestoreAdminRestStub): - def __hash__(self): - return hash("DeleteIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.DeleteIndexRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete index method over HTTP. 
- - Args: - request (~.firestore_admin.DeleteIndexRequest): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_index(request, metadata) - pb_request = firestore_admin.DeleteIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ExportDocuments(FirestoreAdminRestStub): - def __hash__(self): - return hash("ExportDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ExportDocumentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the export documents method over HTTP. - - Args: - request (~.firestore_admin.ExportDocumentsRequest): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/databases/*}:exportDocuments', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_export_documents(request, metadata) - pb_request = firestore_admin.ExportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_documents(resp) - return resp - - class _GetBackup(FirestoreAdminRestStub): - def __hash__(self): - return hash("GetBackup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.GetBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> backup.Backup: - r"""Call the get backup method over HTTP. - - Args: - request (~.firestore_admin.GetBackupRequest): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.backup.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backups/*}', - }, - ] - request, metadata = self._interceptor.pre_get_backup(request, metadata) - pb_request = firestore_admin.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.Backup() - pb_resp = backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup(resp) - return resp - - class _GetBackupSchedule(FirestoreAdminRestStub): - def __hash__(self): - return hash("GetBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.GetBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> schedule.BackupSchedule: - r"""Call the get backup schedule method over HTTP. - - Args: - request (~.firestore_admin.GetBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/backupSchedules/*}', - }, - ] - request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata) - pb_request = firestore_admin.GetBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup_schedule(resp) - return resp - - class _GetDatabase(FirestoreAdminRestStub): - def __hash__(self): - return hash("GetDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.GetDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> database.Database: - r"""Call the get database method over HTTP. - - Args: - request (~.firestore_admin.GetDatabaseRequest): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.database.Database: - A Cloud Firestore Database. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*}', - }, - ] - request, metadata = self._interceptor.pre_get_database(request, metadata) - pb_request = firestore_admin.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = database.Database() - pb_resp = database.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_database(resp) - return resp - - class _GetField(FirestoreAdminRestStub): - def __hash__(self): - return hash("GetField") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.GetFieldRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> field.Field: - r"""Call the get field method over HTTP. - - Args: - request (~.firestore_admin.GetFieldRequest): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.field.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same id. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}', - }, - ] - request, metadata = self._interceptor.pre_get_field(request, metadata) - pb_request = firestore_admin.GetFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = field.Field() - pb_resp = field.Field.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_field(resp) - return resp - - class _GetIndex(FirestoreAdminRestStub): - def __hash__(self): - return hash("GetIndex") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.GetIndexRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> index.Index: - r"""Call the get index method over HTTP. - - Args: - request (~.firestore_admin.GetIndexRequest): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.index.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}', - }, - ] - request, metadata = self._interceptor.pre_get_index(request, metadata) - pb_request = firestore_admin.GetIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = index.Index() - pb_resp = index.Index.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_index(resp) - return resp - - class _ImportDocuments(FirestoreAdminRestStub): - def __hash__(self): - return hash("ImportDocuments") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ImportDocumentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the import documents method over HTTP. - - Args: - request (~.firestore_admin.ImportDocumentsRequest): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/databases/*}:importDocuments', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_import_documents(request, metadata) - pb_request = firestore_admin.ImportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_import_documents(resp) - return resp - - class _ListBackups(FirestoreAdminRestStub): - def __hash__(self): - return hash("ListBackups") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ListBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore_admin.ListBackupsResponse: - r"""Call the list backups method over HTTP. - - Args: - request (~.firestore_admin.ListBackupsRequest): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore_admin.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/backups', - }, - ] - request, metadata = self._interceptor.pre_list_backups(request, metadata) - pb_request = firestore_admin.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListBackupsResponse() - pb_resp = firestore_admin.ListBackupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backups(resp) - return resp - - class _ListBackupSchedules(FirestoreAdminRestStub): - def __hash__(self): - return hash("ListBackupSchedules") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ListBackupSchedulesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""Call the list backup schedules method over HTTP. - - Args: - request (~.firestore_admin.ListBackupSchedulesRequest): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore_admin.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/databases/*}/backupSchedules', - }, - ] - request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) - pb_request = firestore_admin.ListBackupSchedulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListBackupSchedulesResponse() - pb_resp = firestore_admin.ListBackupSchedulesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backup_schedules(resp) - return resp - - class _ListDatabases(FirestoreAdminRestStub): - def __hash__(self): - return hash("ListDatabases") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ListDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore_admin.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.firestore_admin.ListDatabasesRequest): - The request object. A request to list the Firestore - Databases in all locations for a - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore_admin.ListDatabasesResponse: - The list of databases for a project. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/databases', - }, - ] - request, metadata = self._interceptor.pre_list_databases(request, metadata) - pb_request = firestore_admin.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListDatabasesResponse() - pb_resp = firestore_admin.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_databases(resp) - return resp - - class _ListFields(FirestoreAdminRestStub): - def __hash__(self): - return hash("ListFields") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ListFieldsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore_admin.ListFieldsResponse: - r"""Call the list fields method over HTTP. - - Args: - request (~.firestore_admin.ListFieldsRequest): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore_admin.ListFieldsResponse: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields', - }, - ] - request, metadata = self._interceptor.pre_list_fields(request, metadata) - pb_request = firestore_admin.ListFieldsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListFieldsResponse() - pb_resp = firestore_admin.ListFieldsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_fields(resp) - return resp - - class _ListIndexes(FirestoreAdminRestStub): - def __hash__(self): - return hash("ListIndexes") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.ListIndexesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> firestore_admin.ListIndexesResponse: - r"""Call the list indexes method over HTTP. - - Args: - request (~.firestore_admin.ListIndexesRequest): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore_admin.ListIndexesResponse: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes', - }, - ] - request, metadata = self._interceptor.pre_list_indexes(request, metadata) - pb_request = firestore_admin.ListIndexesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListIndexesResponse() - pb_resp = firestore_admin.ListIndexesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_indexes(resp) - return resp - - class _RestoreDatabase(FirestoreAdminRestStub): - def __hash__(self): - return hash("RestoreDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.RestoreDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore database method over HTTP. - - Args: - request (~.firestore_admin.RestoreDatabaseRequest): - The request object. The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/databases:restore', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_restore_database(request, metadata) - pb_request = firestore_admin.RestoreDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_restore_database(resp) - return resp - - class _UpdateBackupSchedule(FirestoreAdminRestStub): - def __hash__(self): - return hash("UpdateBackupSchedule") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.UpdateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> schedule.BackupSchedule: - r"""Call the update backup schedule method over HTTP. - - Args: - request (~.firestore_admin.UpdateBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}', - 'body': 'backup_schedule', - }, - ] - request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) - pb_request = firestore_admin.UpdateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_backup_schedule(resp) - return resp - - class _UpdateDatabase(FirestoreAdminRestStub): - def __hash__(self): - return hash("UpdateDatabase") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.UpdateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update database method over HTTP. - - Args: - request (~.firestore_admin.UpdateDatabaseRequest): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database.name=projects/*/databases/*}', - 'body': 'database', - }, - ] - request, metadata = self._interceptor.pre_update_database(request, metadata) - pb_request = firestore_admin.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_database(resp) - return resp - - class _UpdateField(FirestoreAdminRestStub): - def __hash__(self): - return hash("UpdateField") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: firestore_admin.UpdateFieldRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update field method over HTTP. - - Args: - request (~.firestore_admin.UpdateFieldRequest): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}', - 'body': 'field', - }, - ] - request, metadata = self._interceptor.pre_update_field(request, metadata) - pb_request = firestore_admin.UpdateFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_field(resp) - return resp - - @property - def create_backup_schedule(self) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database(self) -> Callable[ - [firestore_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_index(self) -> Callable[ - [firestore_admin.CreateIndexRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup(self) -> Callable[ - [firestore_admin.DeleteBackupRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_schedule(self) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_database(self) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_index(self) -> Callable[ - [firestore_admin.DeleteIndexRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_documents(self) -> Callable[ - [firestore_admin.ExportDocumentsRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup(self) -> Callable[ - [firestore_admin.GetBackupRequest], - backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_schedule(self) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], - schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database(self) -> Callable[ - [firestore_admin.GetDatabaseRequest], - database.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_field(self) -> Callable[ - [firestore_admin.GetFieldRequest], - field.Field]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetField(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_index(self) -> Callable[ - [firestore_admin.GetIndexRequest], - index.Index]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def import_documents(self) -> Callable[ - [firestore_admin.ImportDocumentsRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ImportDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backups(self) -> Callable[ - [firestore_admin.ListBackupsRequest], - firestore_admin.ListBackupsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_schedules(self) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - firestore_admin.ListBackupSchedulesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases(self) -> Callable[ - [firestore_admin.ListDatabasesRequest], - firestore_admin.ListDatabasesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_fields(self) -> Callable[ - [firestore_admin.ListFieldsRequest], - firestore_admin.ListFieldsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListFields(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_indexes(self) -> Callable[ - [firestore_admin.ListIndexesRequest], - firestore_admin.ListIndexesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListIndexes(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_database(self) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup_schedule(self) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database(self) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_field(self) -> Callable[ - [firestore_admin.UpdateFieldRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateField(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(FirestoreAdminRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}:cancel', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(FirestoreAdminRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(FirestoreAdminRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(FirestoreAdminRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/databases/*}/operations', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'FirestoreAdminRestTransport', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py deleted file mode 100644 index ea202681cd..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/__init__.py +++ /dev/null @@ -1,128 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .backup import ( - Backup, -) -from .database import ( - Database, -) -from .field import ( - Field, -) -from .firestore_admin import ( - CreateBackupScheduleRequest, - CreateDatabaseMetadata, - CreateDatabaseRequest, - CreateIndexRequest, - DeleteBackupRequest, - DeleteBackupScheduleRequest, - DeleteDatabaseMetadata, - DeleteDatabaseRequest, - DeleteIndexRequest, - ExportDocumentsRequest, - GetBackupRequest, - GetBackupScheduleRequest, - GetDatabaseRequest, - GetFieldRequest, - GetIndexRequest, - ImportDocumentsRequest, - ListBackupSchedulesRequest, - ListBackupSchedulesResponse, - ListBackupsRequest, - ListBackupsResponse, - ListDatabasesRequest, - ListDatabasesResponse, - ListFieldsRequest, - ListFieldsResponse, - ListIndexesRequest, - ListIndexesResponse, - RestoreDatabaseRequest, - UpdateBackupScheduleRequest, - UpdateDatabaseMetadata, - UpdateDatabaseRequest, - UpdateFieldRequest, -) -from .index import ( - Index, -) -from .location import ( - LocationMetadata, -) -from .operation import ( - ExportDocumentsMetadata, - ExportDocumentsResponse, - FieldOperationMetadata, - ImportDocumentsMetadata, - IndexOperationMetadata, - Progress, - RestoreDatabaseMetadata, - OperationState, -) -from .schedule import ( - BackupSchedule, - DailyRecurrence, - WeeklyRecurrence, -) - -__all__ = ( - 'Backup', - 'Database', - 'Field', - 'CreateBackupScheduleRequest', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'CreateIndexRequest', - 'DeleteBackupRequest', - 'DeleteBackupScheduleRequest', - 'DeleteDatabaseMetadata', - 'DeleteDatabaseRequest', - 'DeleteIndexRequest', - 'ExportDocumentsRequest', - 'GetBackupRequest', - 'GetBackupScheduleRequest', - 'GetDatabaseRequest', - 'GetFieldRequest', - 'GetIndexRequest', - 'ImportDocumentsRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListFieldsRequest', - 'ListFieldsResponse', - 
'ListIndexesRequest', - 'ListIndexesResponse', - 'RestoreDatabaseRequest', - 'UpdateBackupScheduleRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'UpdateFieldRequest', - 'Index', - 'LocationMetadata', - 'ExportDocumentsMetadata', - 'ExportDocumentsResponse', - 'FieldOperationMetadata', - 'ImportDocumentsMetadata', - 'IndexOperationMetadata', - 'Progress', - 'RestoreDatabaseMetadata', - 'OperationState', - 'BackupSchedule', - 'DailyRecurrence', - 'WeeklyRecurrence', -) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py deleted file mode 100644 index baa5c8153f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/backup.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'Backup', - }, -) - - -class Backup(proto.Message): - r"""A Backup of a Cloud Firestore Database. - - The backup contains all documents and index configurations for - the given database at a specific point in time. - - Attributes: - name (str): - Output only. 
The unique resource name of the Backup. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - database (str): - Output only. Name of the Firestore database that the backup - is from. - - Format is ``projects/{project}/databases/{database}``. - database_uid (str): - Output only. The system-generated UUID4 for - the Firestore database that the backup is from. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The backup contains an - externally consistent copy of the database at - this time. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - backup expires. - stats (google.cloud.firestore_admin_v1.types.Backup.Stats): - Output only. Statistics about the backup. - - This data only becomes available after the - backup is fully materialized to secondary - storage. This field will be empty till then. - state (google.cloud.firestore_admin_v1.types.Backup.State): - Output only. The current state of the backup. - """ - class State(proto.Enum): - r"""Indicate the current state of the backup. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - CREATING (1): - The pending backup is still being created. - Operations on the backup will be rejected in - this state. - READY (2): - The backup is complete and ready to use. - NOT_AVAILABLE (3): - The backup is not available at this moment. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - NOT_AVAILABLE = 3 - - class Stats(proto.Message): - r"""Backup specific statistics. - - Attributes: - size_bytes (int): - Output only. Summation of the size of all - documents and index entries in the backup, - measured in bytes. - document_count (int): - Output only. The total number of documents - contained in the backup. - index_count (int): - Output only. The total number of index - entries contained in the backup. 
- """ - - size_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - document_count: int = proto.Field( - proto.INT64, - number=2, - ) - index_count: int = proto.Field( - proto.INT64, - number=3, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - database_uid: str = proto.Field( - proto.STRING, - number=7, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - stats: Stats = proto.Field( - proto.MESSAGE, - number=6, - message=Stats, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py deleted file mode 100644 index 2ed970a159..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/database.py +++ /dev/null @@ -1,294 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'Database', - }, -) - - -class Database(proto.Message): - r"""A Cloud Firestore Database. - - Attributes: - name (str): - The resource name of the Database. Format: - ``projects/{project}/databases/{database}`` - uid (str): - Output only. The system-generated UUID4 for - this Database. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this database was - created. Databases created before 2016 do not populate - create_time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - database was most recently updated. Note this - only includes updates to the database resource - and not data contained by the database. - location_id (str): - The location of the database. Available - locations are listed at - https://cloud.google.com/firestore/docs/locations. - type_ (google.cloud.firestore_admin_v1.types.Database.DatabaseType): - The type of the database. - See - https://cloud.google.com/datastore/docs/firestore-or-datastore - for information about how to choose. - concurrency_mode (google.cloud.firestore_admin_v1.types.Database.ConcurrencyMode): - The concurrency control mode to use for this - database. - version_retention_period (google.protobuf.duration_pb2.Duration): - Output only. The period during which past versions of data - are retained in the database. - - Any [read][google.firestore.v1.GetDocumentRequest.read_time] - or - [query][google.firestore.v1.ListDocumentsRequest.read_time] - can specify a ``read_time`` within this window, and will - read the state of the database at that time. 
- - If the PITR feature is enabled, the retention period is 7 - days. Otherwise, the retention period is 1 hour. - earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The earliest timestamp at which older versions - of the data can be read from the database. See - [version_retention_period] above; this field is populated - with ``now - version_retention_period``. - - This value is continuously updated, and becomes stale the - moment it is queried. If you are using this value to recover - data, make sure to account for the time from the moment when - the value is queried to the moment when you initiate the - recovery. - point_in_time_recovery_enablement (google.cloud.firestore_admin_v1.types.Database.PointInTimeRecoveryEnablement): - Whether to enable the PITR feature on this - database. - app_engine_integration_mode (google.cloud.firestore_admin_v1.types.Database.AppEngineIntegrationMode): - The App Engine integration mode to use for - this database. - key_prefix (str): - Output only. The key_prefix for this database. This - key_prefix is used, in combination with the project id ("~") - to construct the application id that is returned from the - Cloud Datastore APIs in Google App Engine first generation - runtimes. - - This value may be empty in which case the appid to use for - URL-encoded keys is the project_id (eg: foo instead of - v~foo). - delete_protection_state (google.cloud.firestore_admin_v1.types.Database.DeleteProtectionState): - State of delete protection for the database. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - """ - class DatabaseType(proto.Enum): - r"""The type of the database. - See - https://cloud.google.com/datastore/docs/firestore-or-datastore - for information about how to choose. - - Mode changes are only allowed if the database is empty. 
- - Values: - DATABASE_TYPE_UNSPECIFIED (0): - The default value. This value is used if the - database type is omitted. - FIRESTORE_NATIVE (1): - Firestore Native Mode - DATASTORE_MODE (2): - Firestore in Datastore Mode. - """ - DATABASE_TYPE_UNSPECIFIED = 0 - FIRESTORE_NATIVE = 1 - DATASTORE_MODE = 2 - - class ConcurrencyMode(proto.Enum): - r"""The type of concurrency control mode for transactions. - - Values: - CONCURRENCY_MODE_UNSPECIFIED (0): - Not used. - OPTIMISTIC (1): - Use optimistic concurrency control by - default. This mode is available for Cloud - Firestore databases. - PESSIMISTIC (2): - Use pessimistic concurrency control by - default. This mode is available for Cloud - Firestore databases. - - This is the default setting for Cloud Firestore. - OPTIMISTIC_WITH_ENTITY_GROUPS (3): - Use optimistic concurrency control with - entity groups by default. - This is the only available mode for Cloud - Datastore. - - This mode is also available for Cloud Firestore - with Datastore Mode but is not recommended. - """ - CONCURRENCY_MODE_UNSPECIFIED = 0 - OPTIMISTIC = 1 - PESSIMISTIC = 2 - OPTIMISTIC_WITH_ENTITY_GROUPS = 3 - - class PointInTimeRecoveryEnablement(proto.Enum): - r"""Point In Time Recovery feature enablement. - - Values: - POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED (0): - Not used. - POINT_IN_TIME_RECOVERY_ENABLED (1): - Reads are supported on selected versions of the data from - within the past 7 days: - - - Reads against any timestamp within the past hour - - Reads against 1-minute snapshots beyond 1 hour and within - 7 days - - ``version_retention_period`` and ``earliest_version_time`` - can be used to determine the supported versions. - POINT_IN_TIME_RECOVERY_DISABLED (2): - Reads are supported on any version of the - data from within the past 1 hour. 
- """ - POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED = 0 - POINT_IN_TIME_RECOVERY_ENABLED = 1 - POINT_IN_TIME_RECOVERY_DISABLED = 2 - - class AppEngineIntegrationMode(proto.Enum): - r"""The type of App Engine integration mode. - - Values: - APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED (0): - Not used. - ENABLED (1): - If an App Engine application exists in the - same region as this database, App Engine - configuration will impact this database. This - includes disabling of the application & - database, as well as disabling writes to the - database. - DISABLED (2): - App Engine has no effect on the ability of - this database to serve requests. - - This is the default setting for databases - created with the Firestore API. - """ - APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED = 0 - ENABLED = 1 - DISABLED = 2 - - class DeleteProtectionState(proto.Enum): - r"""The delete protection state of the database. - - Values: - DELETE_PROTECTION_STATE_UNSPECIFIED (0): - The default value. Delete protection type is - not specified - DELETE_PROTECTION_DISABLED (1): - Delete protection is disabled - DELETE_PROTECTION_ENABLED (2): - Delete protection is enabled - """ - DELETE_PROTECTION_STATE_UNSPECIFIED = 0 - DELETE_PROTECTION_DISABLED = 1 - DELETE_PROTECTION_ENABLED = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - location_id: str = proto.Field( - proto.STRING, - number=9, - ) - type_: DatabaseType = proto.Field( - proto.ENUM, - number=10, - enum=DatabaseType, - ) - concurrency_mode: ConcurrencyMode = proto.Field( - proto.ENUM, - number=15, - enum=ConcurrencyMode, - ) - version_retention_period: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=17, - 
message=duration_pb2.Duration, - ) - earliest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=18, - message=timestamp_pb2.Timestamp, - ) - point_in_time_recovery_enablement: PointInTimeRecoveryEnablement = proto.Field( - proto.ENUM, - number=21, - enum=PointInTimeRecoveryEnablement, - ) - app_engine_integration_mode: AppEngineIntegrationMode = proto.Field( - proto.ENUM, - number=19, - enum=AppEngineIntegrationMode, - ) - key_prefix: str = proto.Field( - proto.STRING, - number=20, - ) - delete_protection_state: DeleteProtectionState = proto.Field( - proto.ENUM, - number=22, - enum=DeleteProtectionState, - ) - etag: str = proto.Field( - proto.STRING, - number=99, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py deleted file mode 100644 index b0093a22c3..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/field.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import index - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'Field', - }, -) - - -class Field(proto.Message): - r"""Represents a single field in the database. - - Fields are grouped by their "Collection Group", which represent - all collections in the database with the same id. - - Attributes: - name (str): - Required. A field name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` - - A field path may be a simple field name, e.g. ``address`` or - a path to fields within map_value , e.g. ``address.city``, - or a special field path. The only valid special field is - ``*``, which represents any field. - - Field paths may be quoted using - ``(backtick). The only character that needs to be escaped within a quoted field path is the backtick character itself, escaped using a backslash. Special characters in field paths that must be quoted include:``\ \*\ ``,``.\ :literal:`, ``` (backtick),`\ [``,``]`, - as well as any ascii symbolic characters. - - Examples: (Note: Comments here are written in markdown - syntax, so there is an additional layer of backticks to - represent a code block) - ``\``\ address.city\`\ ``represents a field named``\ address.city\ ``, not the map key``\ city\ ``in the field``\ address\ ``.``\ \`\ *\`\ ``represents a field named``*\ \`, - not any field. - - A special ``Field`` contains the default indexing settings - for all fields. This field's resource name is: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`` - Indexes defined on this ``Field`` will be applied to all - fields which do not have their own ``Field`` index - configuration. 
- index_config (google.cloud.firestore_admin_v1.types.Field.IndexConfig): - The index configuration for this field. If unset, field - indexing will revert to the configuration defined by the - ``ancestor_field``. To explicitly remove all indexes for - this field, specify an index config with an empty list of - indexes. - ttl_config (google.cloud.firestore_admin_v1.types.Field.TtlConfig): - The TTL configuration for this ``Field``. Setting or - unsetting this will enable or disable the TTL for documents - that have this ``Field``. - """ - - class IndexConfig(proto.Message): - r"""The index configuration for this field. - - Attributes: - indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): - The indexes supported for this field. - uses_ancestor_config (bool): - Output only. When true, the ``Field``'s index configuration - is set from the configuration specified by the - ``ancestor_field``. When false, the ``Field``'s index - configuration is defined explicitly. - ancestor_field (str): - Output only. Specifies the resource name of the ``Field`` - from which this field's index configuration is set (when - ``uses_ancestor_config`` is true), or from which it *would* - be set if this field had no index configuration (when - ``uses_ancestor_config`` is false). - reverting (bool): - Output only When true, the ``Field``'s index configuration - is in the process of being reverted. Once complete, the - index config will transition to the same state as the field - specified by ``ancestor_field``, at which point - ``uses_ancestor_config`` will be ``true`` and ``reverting`` - will be ``false``. 
- """ - - indexes: MutableSequence[index.Index] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=index.Index, - ) - uses_ancestor_config: bool = proto.Field( - proto.BOOL, - number=2, - ) - ancestor_field: str = proto.Field( - proto.STRING, - number=3, - ) - reverting: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class TtlConfig(proto.Message): - r"""The TTL (time-to-live) configuration for documents that have this - ``Field`` set. Storing a timestamp value into a TTL-enabled field - will be treated as the document's absolute expiration time. Using - any other data type or leaving the field absent will disable the TTL - for the individual document. - - Attributes: - state (google.cloud.firestore_admin_v1.types.Field.TtlConfig.State): - Output only. The state of the TTL - configuration. - """ - class State(proto.Enum): - r"""The state of applying the TTL configuration to all documents. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified or unknown. - CREATING (1): - The TTL is being applied. There is an active - long-running operation to track the change. - Newly written documents will have TTLs applied - as requested. Requested TTLs on existing - documents are still being processed. When TTLs - on all existing documents have been processed, - the state will move to 'ACTIVE'. - ACTIVE (2): - The TTL is active for all documents. - NEEDS_REPAIR (3): - The TTL configuration could not be enabled for all existing - documents. Newly written documents will continue to have - their TTL applied. The LRO returned when last attempting to - enable TTL for this ``Field`` has failed, and may have more - details. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - NEEDS_REPAIR = 3 - - state: 'Field.TtlConfig.State' = proto.Field( - proto.ENUM, - number=1, - enum='Field.TtlConfig.State', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - index_config: IndexConfig = proto.Field( - proto.MESSAGE, - number=2, - message=IndexConfig, - ) - ttl_config: TtlConfig = proto.Field( - proto.MESSAGE, - number=3, - message=TtlConfig, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py deleted file mode 100644 index db39122110..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ /dev/null @@ -1,815 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import backup as gfa_backup -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import schedule -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'ListDatabasesRequest', - 'CreateDatabaseRequest', - 'CreateDatabaseMetadata', - 'ListDatabasesResponse', - 'GetDatabaseRequest', - 'UpdateDatabaseRequest', - 'UpdateDatabaseMetadata', - 'DeleteDatabaseRequest', - 'DeleteDatabaseMetadata', - 'CreateBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'UpdateBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'DeleteBackupScheduleRequest', - 'CreateIndexRequest', - 'ListIndexesRequest', - 'ListIndexesResponse', - 'GetIndexRequest', - 'DeleteIndexRequest', - 'UpdateFieldRequest', - 'GetFieldRequest', - 'ListFieldsRequest', - 'ListFieldsResponse', - 'ExportDocumentsRequest', - 'ImportDocumentsRequest', - 'GetBackupRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'DeleteBackupRequest', - 'RestoreDatabaseRequest', - }, -) - - -class ListDatabasesRequest(proto.Message): - r"""A request to list the Firestore Databases in all locations - for a project. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. 
- - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - database (google.cloud.firestore_admin_v1.types.Database): - Required. The Database to create. - database_id (str): - Required. The ID to use for the database, which will become - the final component of the database's resource name. - - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database id is also valid. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database: gfa_database.Database = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_database.Database, - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateDatabaseMetadata(proto.Message): - r"""Metadata related to the create database operation. - """ - - -class ListDatabasesResponse(proto.Message): - r"""The list of databases for a project. - - Attributes: - databases (MutableSequence[google.cloud.firestore_admin_v1.types.Database]): - The databases in the project. - unreachable (MutableSequence[str]): - In the event that data about individual databases cannot be - listed they will be recorded here. - - An example entry might be: - projects/some_project/locations/some_location This can - happen if the Cloud Region that the Database resides in is - currently unavailable. In this case we can't fetch all the - details about the database. You may be able to get a more - detailed error message (or possibly fetch the resource) by - sending a 'Get' request for the resource or a 'List' request - for the specific location. 
- """ - - databases: MutableSequence[gfa_database.Database] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_database.Database, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - - Attributes: - database (google.cloud.firestore_admin_v1.types.Database): - Required. The database to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - """ - - database: gfa_database.Database = proto.Field( - proto.MESSAGE, - number=1, - message=gfa_database.Database, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateDatabaseMetadata(proto.Message): - r"""Metadata related to the update database operation. - """ - - -class DeleteDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - etag (str): - The current etag of the Database. If an etag is provided and - does not match the current etag of the database, deletion - will be blocked and a FAILED_PRECONDITION error will be - returned. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteDatabaseMetadata(proto.Message): - r"""Metadata related to the delete database operation. - """ - - -class CreateBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - - Attributes: - parent (str): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_schedule: schedule.BackupSchedule = proto.Field( - proto.MESSAGE, - number=2, - message=schedule.BackupSchedule, - ) - - -class GetBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - - Attributes: - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - - Attributes: - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. 
- """ - - backup_schedule: schedule.BackupSchedule = proto.Field( - proto.MESSAGE, - number=1, - message=schedule.BackupSchedule, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListBackupSchedulesRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - Attributes: - parent (str): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupSchedulesResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - Attributes: - backup_schedules (MutableSequence[google.cloud.firestore_admin_v1.types.BackupSchedule]): - List of all backup schedules. - """ - - backup_schedules: MutableSequence[schedule.BackupSchedule] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=schedule.BackupSchedule, - ) - - -class DeleteBackupScheduleRequest(proto.Message): - r"""The request for [FirestoreAdmin.DeleteBackupSchedules][]. - - Attributes: - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - index (google.cloud.firestore_admin_v1.types.Index): - Required. The composite index to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - index: gfa_index.Index = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_index.Index, - ) - - -class ListIndexesRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - filter (str): - The filter to apply to list results. - page_size (int): - The number of results to return. - page_token (str): - A page token, returned from a previous call to - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], - that may be used to get the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListIndexesResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Attributes: - indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): - The requested indexes. - next_page_token (str): - A page token that may be used to request - another page of results. If blank, this is the - last page. - """ - - @property - def raw_page(self): - return self - - indexes: MutableSequence[gfa_index.Index] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_index.Index, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - - Attributes: - name (str): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateFieldRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - - Attributes: - field (google.cloud.firestore_admin_v1.types.Field): - Required. The field to be updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask, relative to the field. If specified, only - configuration specified by this field_mask will be updated - in the field. - """ - - field: gfa_field.Field = proto.Field( - proto.MESSAGE, - number=1, - message=gfa_field.Field, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetFieldRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListFieldsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - filter (str): - The filter to apply to list results. 
Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with a filter that includes - ``indexConfig.usesAncestorConfig:false`` . - page_size (int): - The number of results to return. - page_token (str): - A page token, returned from a previous call to - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], - that may be used to get the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListFieldsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Attributes: - fields (MutableSequence[google.cloud.firestore_admin_v1.types.Field]): - The requested fields. - next_page_token (str): - A page token that may be used to request - another page of results. If blank, this is the - last page. - """ - - @property - def raw_page(self): - return self - - fields: MutableSequence[gfa_field.Field] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_field.Field, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ExportDocumentsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - - Attributes: - name (str): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - collection_ids (MutableSequence[str]): - Which collection ids to export. Unspecified - means all collections. - output_uri_prefix (str): - The output URI. 
Currently only supports Google Cloud Storage - URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, - where ``BUCKET_NAME`` is the name of the Google Cloud - Storage bucket and ``NAMESPACE_PATH`` is an optional Google - Cloud Storage namespace path. When choosing a name, be sure - to consider Google Cloud Storage naming guidelines: - https://cloud.google.com/storage/docs/naming. If the URI is - a bucket (without a namespace path), a prefix will be - generated based on the start time. - namespace_ids (MutableSequence[str]): - An empty list represents all namespaces. This - is the preferred usage for databases that don't - use namespaces. - - An empty string element represents the default - namespace. This should be used if the database - has data in non-default namespaces, but doesn't - want to include them. Each namespace in this - list must be unique. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp that corresponds to the version of the - database to be exported. The timestamp must be in the past, - rounded to the minute and not older than - [earliestVersionTime][google.firestore.admin.v1.Database.earliest_version_time]. - If specified, then the exported documents will represent a - consistent view of the database at the provided time. - Otherwise, there are no guarantees about the consistency of - the exported documents. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - output_uri_prefix: str = proto.Field( - proto.STRING, - number=3, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class ImportDocumentsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. 
- - Attributes: - name (str): - Required. Database to import into. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - collection_ids (MutableSequence[str]): - Which collection ids to import. Unspecified - means all collections included in the import. - input_uri_prefix (str): - Location of the exported files. This must match the - output_uri_prefix of an ExportDocumentsResponse from an - export that has completed successfully. See: - [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix]. - namespace_ids (MutableSequence[str]): - An empty list represents all namespaces. This - is the preferred usage for databases that don't - use namespaces. - - An empty string element represents the default - namespace. This should be used if the database - has data in non-default namespaces, but doesn't - want to include them. Each namespace in this - list must be unique. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - input_uri_prefix: str = proto.Field( - proto.STRING, - number=3, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class GetBackupRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - - Attributes: - name (str): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - Attributes: - parent (str): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. 
Use - ``{location} = '-'`` to list backups from all locations for - the given project. This allows listing backups from a single - location or from all locations. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - Attributes: - backups (MutableSequence[google.cloud.firestore_admin_v1.types.Backup]): - List of all backups for the project. - unreachable (MutableSequence[str]): - List of locations that existing backups were - not able to be fetched from. - Instead of failing the entire requests when a - single location is unreachable, this response - returns a partial result set and list of - locations unable to be reached here. The request - can be retried against a single location to get - a concrete error. - """ - - backups: MutableSequence[gfa_backup.Backup] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_backup.Backup, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteBackupRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - - Attributes: - name (str): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RestoreDatabaseRequest(proto.Message): - r"""The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.RestoreDatabase]. - - Attributes: - parent (str): - Required. The project to restore the database in. Format is - ``projects/{project_id}``. - database_id (str): - Required. The ID to use for the database, which will become - the final component of the database's resource name. This - database id must not be associated with an existing - database. 
- - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database id is also valid. - backup (str): - Required. Backup to restore from. Must be from the same - project as the parent. - - Format is: - ``projects/{project_id}/locations/{location}/backups/{backup}`` - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py deleted file mode 100644 index 727fa12699..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/index.py +++ /dev/null @@ -1,301 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'Index', - }, -) - - -class Index(proto.Message): - r"""Cloud Firestore indexes enable simple and complex queries - against documents in a database. - - Attributes: - name (str): - Output only. A server defined name for this index. The form - of this name for composite indexes will be: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`` - For single field indexes, this field will be empty. - query_scope (google.cloud.firestore_admin_v1.types.Index.QueryScope): - Indexes with a collection query scope - specified allow queries against a collection - that is the child of a specific document, - specified at query time, and that has the same - collection id. - - Indexes with a collection group query scope - specified allow queries against all collections - descended from a specific document, specified at - query time, and that have the same collection id - as this index. - api_scope (google.cloud.firestore_admin_v1.types.Index.ApiScope): - The API scope supported by this index. - fields (MutableSequence[google.cloud.firestore_admin_v1.types.Index.IndexField]): - The fields supported by this index. - - For composite indexes, this requires a minimum of 2 and a - maximum of 100 fields. The last field entry is always for - the field path ``__name__``. If, on creation, ``__name__`` - was not specified as the last field, it will be added - automatically with the same direction as that of the last - field defined. If the final field in a composite index is - not directional, the ``__name__`` will be ordered ASCENDING - (unless explicitly specified). - - For single field indexes, this will always be exactly one - entry with a field path equal to the field path of the - associated field. 
- state (google.cloud.firestore_admin_v1.types.Index.State): - Output only. The serving state of the index. - """ - class QueryScope(proto.Enum): - r"""Query Scope defines the scope at which a query is run. This is - specified on a StructuredQuery's ``from`` field. - - Values: - QUERY_SCOPE_UNSPECIFIED (0): - The query scope is unspecified. Not a valid - option. - COLLECTION (1): - Indexes with a collection query scope - specified allow queries against a collection - that is the child of a specific document, - specified at query time, and that has the - collection id specified by the index. - COLLECTION_GROUP (2): - Indexes with a collection group query scope - specified allow queries against all collections - that has the collection id specified by the - index. - COLLECTION_RECURSIVE (3): - Include all the collections's ancestor in the - index. Only available for Datastore Mode - databases. - """ - QUERY_SCOPE_UNSPECIFIED = 0 - COLLECTION = 1 - COLLECTION_GROUP = 2 - COLLECTION_RECURSIVE = 3 - - class ApiScope(proto.Enum): - r"""API Scope defines the APIs (Firestore Native, or Firestore in - Datastore Mode) that are supported for queries. - - Values: - ANY_API (0): - The index can only be used by the Firestore - Native query API. This is the default. - DATASTORE_MODE_API (1): - The index can only be used by the Firestore - in Datastore Mode query API. - """ - ANY_API = 0 - DATASTORE_MODE_API = 1 - - class State(proto.Enum): - r"""The state of an index. During index creation, an index will be in - the ``CREATING`` state. If the index is created successfully, it - will transition to the ``READY`` state. If the index creation - encounters a problem, the index will transition to the - ``NEEDS_REPAIR`` state. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - CREATING (1): - The index is being created. - There is an active long-running operation for - the index. The index is updated when writing a - document. Some index data may exist. 
- READY (2): - The index is ready to be used. - The index is updated when writing a document. - The index is fully populated from all stored - documents it applies to. - NEEDS_REPAIR (3): - The index was being created, but something - went wrong. There is no active long-running - operation for the index, and the most recently - finished long-running operation failed. The - index is not updated when writing a document. - Some index data may exist. - Use the google.longrunning.Operations API to - determine why the operation that last attempted - to create this index failed, then re-create the - index. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - NEEDS_REPAIR = 3 - - class IndexField(proto.Message): - r"""A field in an index. The field_path describes which field is - indexed, the value_mode describes how the field value is indexed. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field_path (str): - Can be **name**. For single field indexes, this must match - the name of the field or may be omitted. - order (google.cloud.firestore_admin_v1.types.Index.IndexField.Order): - Indicates that this field supports ordering - by the specified order or comparing using =, !=, - <, <=, >, >=. - - This field is a member of `oneof`_ ``value_mode``. - array_config (google.cloud.firestore_admin_v1.types.Index.IndexField.ArrayConfig): - Indicates that this field supports operations on - ``array_value``\ s. - - This field is a member of `oneof`_ ``value_mode``. - vector_config (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig): - Indicates that this field supports nearest - neighbors and distance operations on vector. 
- - This field is a member of `oneof`_ ``value_mode``. - """ - class Order(proto.Enum): - r"""The supported orderings. - - Values: - ORDER_UNSPECIFIED (0): - The ordering is unspecified. Not a valid - option. - ASCENDING (1): - The field is ordered by ascending field - value. - DESCENDING (2): - The field is ordered by descending field - value. - """ - ORDER_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class ArrayConfig(proto.Enum): - r"""The supported array value configurations. - - Values: - ARRAY_CONFIG_UNSPECIFIED (0): - The index does not support additional array - queries. - CONTAINS (1): - The index supports array containment queries. - """ - ARRAY_CONFIG_UNSPECIFIED = 0 - CONTAINS = 1 - - class VectorConfig(proto.Message): - r"""The index configuration to support vector search operations - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dimension (int): - Required. The vector dimension this - configuration applies to. - The resulting index will only include vectors of - this dimension, and can be used for vector - search with the same dimension. - flat (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig.FlatIndex): - Indicates the vector index is a flat index. - - This field is a member of `oneof`_ ``type``. - """ - - class FlatIndex(proto.Message): - r"""An index that stores vectors in a flat data structure, and - supports exhaustive search. 
- - """ - - dimension: int = proto.Field( - proto.INT32, - number=1, - ) - flat: 'Index.IndexField.VectorConfig.FlatIndex' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='Index.IndexField.VectorConfig.FlatIndex', - ) - - field_path: str = proto.Field( - proto.STRING, - number=1, - ) - order: 'Index.IndexField.Order' = proto.Field( - proto.ENUM, - number=2, - oneof='value_mode', - enum='Index.IndexField.Order', - ) - array_config: 'Index.IndexField.ArrayConfig' = proto.Field( - proto.ENUM, - number=3, - oneof='value_mode', - enum='Index.IndexField.ArrayConfig', - ) - vector_config: 'Index.IndexField.VectorConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='value_mode', - message='Index.IndexField.VectorConfig', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - query_scope: QueryScope = proto.Field( - proto.ENUM, - number=2, - enum=QueryScope, - ) - api_scope: ApiScope = proto.Field( - proto.ENUM, - number=5, - enum=ApiScope, - ) - fields: MutableSequence[IndexField] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=IndexField, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py deleted file mode 100644 index 0139a3962f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/location.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'LocationMetadata', - }, -) - - -class LocationMetadata(proto.Message): - r"""The metadata message for - [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. - - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py deleted file mode 100644 index e5be71be20..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/operation.py +++ /dev/null @@ -1,507 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'OperationState', - 'IndexOperationMetadata', - 'FieldOperationMetadata', - 'ExportDocumentsMetadata', - 'ImportDocumentsMetadata', - 'ExportDocumentsResponse', - 'RestoreDatabaseMetadata', - 'Progress', - }, -) - - -class OperationState(proto.Enum): - r"""Describes the state of the operation. - - Values: - OPERATION_STATE_UNSPECIFIED (0): - Unspecified. - INITIALIZING (1): - Request is being prepared for processing. - PROCESSING (2): - Request is actively being processed. - CANCELLING (3): - Request is in the process of being cancelled - after user called - google.longrunning.Operations.CancelOperation on - the operation. - FINALIZING (4): - Request has been processed and is in its - finalization stage. - SUCCESSFUL (5): - Request has completed successfully. - FAILED (6): - Request has finished being processed, but - encountered an error. - CANCELLED (7): - Request has finished being cancelled after - user called - google.longrunning.Operations.CancelOperation. - """ - OPERATION_STATE_UNSPECIFIED = 0 - INITIALIZING = 1 - PROCESSING = 2 - CANCELLING = 3 - FINALIZING = 4 - SUCCESSFUL = 5 - FAILED = 6 - CANCELLED = 7 - - -class IndexOperationMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. 
- index (str): - The index resource that this operation is acting on. For - example: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - index: str = proto.Field( - proto.STRING, - number=3, - ) - state: 'OperationState' = proto.Field( - proto.ENUM, - number=4, - enum='OperationState', - ) - progress_documents: 'Progress' = proto.Field( - proto.MESSAGE, - number=5, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=6, - message='Progress', - ) - - -class FieldOperationMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - field (str): - The field resource that this operation is acting on. 
For - example: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` - index_config_deltas (MutableSequence[google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta]): - A list of - [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], - which describe the intent of this operation. - state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - ttl_config_delta (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta): - Describes the deltas of TTL configuration. - """ - - class IndexConfigDelta(proto.Message): - r"""Information about an index configuration change. - - Attributes: - change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta.ChangeType): - Specifies how the index is changing. - index (google.cloud.firestore_admin_v1.types.Index): - The index being changed. - """ - class ChangeType(proto.Enum): - r"""Specifies how the index is changing. - - Values: - CHANGE_TYPE_UNSPECIFIED (0): - The type of change is not specified or known. - ADD (1): - The single field index is being added. - REMOVE (2): - The single field index is being removed. - """ - CHANGE_TYPE_UNSPECIFIED = 0 - ADD = 1 - REMOVE = 2 - - change_type: 'FieldOperationMetadata.IndexConfigDelta.ChangeType' = proto.Field( - proto.ENUM, - number=1, - enum='FieldOperationMetadata.IndexConfigDelta.ChangeType', - ) - index: gfa_index.Index = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_index.Index, - ) - - class TtlConfigDelta(proto.Message): - r"""Information about a TTL configuration change. 
- - Attributes: - change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta.ChangeType): - Specifies how the TTL configuration is - changing. - """ - class ChangeType(proto.Enum): - r"""Specifies how the TTL config is changing. - - Values: - CHANGE_TYPE_UNSPECIFIED (0): - The type of change is not specified or known. - ADD (1): - The TTL config is being added. - REMOVE (2): - The TTL config is being removed. - """ - CHANGE_TYPE_UNSPECIFIED = 0 - ADD = 1 - REMOVE = 2 - - change_type: 'FieldOperationMetadata.TtlConfigDelta.ChangeType' = proto.Field( - proto.ENUM, - number=1, - enum='FieldOperationMetadata.TtlConfigDelta.ChangeType', - ) - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - field: str = proto.Field( - proto.STRING, - number=3, - ) - index_config_deltas: MutableSequence[IndexConfigDelta] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=IndexConfigDelta, - ) - state: 'OperationState' = proto.Field( - proto.ENUM, - number=5, - enum='OperationState', - ) - progress_documents: 'Progress' = proto.Field( - proto.MESSAGE, - number=6, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=7, - message='Progress', - ) - ttl_config_delta: TtlConfigDelta = proto.Field( - proto.MESSAGE, - number=8, - message=TtlConfigDelta, - ) - - -class ExportDocumentsMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. 
Will be - unset if operation still in progress. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the export operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - collection_ids (MutableSequence[str]): - Which collection ids are being exported. - output_uri_prefix (str): - Where the documents are being exported to. - namespace_ids (MutableSequence[str]): - Which namespace ids are being exported. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp that corresponds to the version - of the database that is being exported. If - unspecified, there are no guarantees about the - consistency of the documents being exported. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: 'OperationState' = proto.Field( - proto.ENUM, - number=3, - enum='OperationState', - ) - progress_documents: 'Progress' = proto.Field( - proto.MESSAGE, - number=4, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=5, - message='Progress', - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - output_uri_prefix: str = proto.Field( - proto.STRING, - number=7, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - -class ImportDocumentsMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - 
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the import operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - collection_ids (MutableSequence[str]): - Which collection ids are being imported. - input_uri_prefix (str): - The location of the documents being imported. - namespace_ids (MutableSequence[str]): - Which namespace ids are being imported. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: 'OperationState' = proto.Field( - proto.ENUM, - number=3, - enum='OperationState', - ) - progress_documents: 'Progress' = proto.Field( - proto.MESSAGE, - number=4, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=5, - message='Progress', - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - input_uri_prefix: str = proto.Field( - proto.STRING, - number=7, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - - -class ExportDocumentsResponse(proto.Message): - r"""Returned in the - [google.longrunning.Operation][google.longrunning.Operation] - response field. - - Attributes: - output_uri_prefix (str): - Location of the output files. 
This can be - used to begin an import into Cloud Firestore - (this project or another project) after the - operation completes successfully. - """ - - output_uri_prefix: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RestoreDatabaseMetadata(proto.Message): - r"""Metadata for the [long-running - operation][google.longrunning.Operation] from the - [RestoreDatabase][google.firestore.admin.v1.RestoreDatabase] - request. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time the restore was started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time the restore finished, unset for - ongoing restores. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The operation state of the restore. - database (str): - The name of the database being restored to. - backup (str): - The name of the backup restoring from. - progress_percentage (google.cloud.firestore_admin_v1.types.Progress): - How far along the restore is as an estimated - percentage of remaining time. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: 'OperationState' = proto.Field( - proto.ENUM, - number=3, - enum='OperationState', - ) - database: str = proto.Field( - proto.STRING, - number=4, - ) - backup: str = proto.Field( - proto.STRING, - number=5, - ) - progress_percentage: 'Progress' = proto.Field( - proto.MESSAGE, - number=8, - message='Progress', - ) - - -class Progress(proto.Message): - r"""Describes the progress of the operation. Unit of work is generic and - must be interpreted based on where - [Progress][google.firestore.admin.v1.Progress] is used. - - Attributes: - estimated_work (int): - The amount of work estimated. - completed_work (int): - The amount of work completed. 
- """ - - estimated_work: int = proto.Field( - proto.INT64, - number=1, - ) - completed_work: int = proto.Field( - proto.INT64, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py b/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py deleted file mode 100644 index 31bdb020a4..0000000000 --- a/owl-bot-staging/firestore_admin/v1/google/cloud/firestore_admin_v1/types/schedule.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.admin.v1', - manifest={ - 'BackupSchedule', - 'DailyRecurrence', - 'WeeklyRecurrence', - }, -) - - -class BackupSchedule(proto.Message): - r"""A backup schedule for a Cloud Firestore Database. - - This resource is owned by the database it is backing up, and is - deleted along with the database. The actual backups are not - though. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The unique backup schedule identifier across - all locations and databases for the given project. - - This will be auto-assigned. - - Format is - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - backup schedule was created and effective since. - - No backups will be created for this schedule - before this time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this backup schedule was - most recently updated. When a backup schedule is first - created, this is the same as create_time. - retention (google.protobuf.duration_pb2.Duration): - At what relative time in the future, compared - to its creation time, the backup should be - deleted, e.g. keep backups for 7 days. - daily_recurrence (google.cloud.firestore_admin_v1.types.DailyRecurrence): - For a schedule that runs daily. - - This field is a member of `oneof`_ ``recurrence``. - weekly_recurrence (google.cloud.firestore_admin_v1.types.WeeklyRecurrence): - For a schedule that runs weekly on a specific - day. - - This field is a member of `oneof`_ ``recurrence``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - retention: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - daily_recurrence: 'DailyRecurrence' = proto.Field( - proto.MESSAGE, - number=7, - oneof='recurrence', - message='DailyRecurrence', - ) - weekly_recurrence: 'WeeklyRecurrence' = proto.Field( - proto.MESSAGE, - number=8, - oneof='recurrence', - message='WeeklyRecurrence', - ) - - -class DailyRecurrence(proto.Message): - r"""Represents a recurring schedule that runs at a specific time - every day. - The time zone is UTC. - - """ - - -class WeeklyRecurrence(proto.Message): - r"""Represents a recurring schedule that runs on a specified day - of the week. - The time zone is UTC. - - Attributes: - day (google.type.dayofweek_pb2.DayOfWeek): - The day of week to run. - - DAY_OF_WEEK_UNSPECIFIED is not allowed. 
- """ - - day: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_admin/v1/mypy.ini b/owl-bot-staging/firestore_admin/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/firestore_admin/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/firestore_admin/v1/noxfile.py b/owl-bot-staging/firestore_admin/v1/noxfile.py deleted file mode 100644 index 57ae37144f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/noxfile.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-firestore-admin' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/firestore_admin_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - -@nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): - """Run the unit test suite against pre-release versions of dependencies.""" - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. 
- with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/firestore_admin_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py deleted file mode 100644 index 686098f602..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py deleted file mode 100644 index 66ba2b1c1a..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py deleted file mode 100644 index bae419d964..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py deleted file mode 100644 index 57c85d136c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_database_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py deleted file mode 100644 index 7a98c87fe1..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py deleted file mode 100644 index dc6db35bd5..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_create_index_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_CreateIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_CreateIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py deleted file mode 100644 index 48d49525e5..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteBackup_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py deleted file mode 100644 index 52fdf18acf..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup_schedule(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py deleted file mode 100644 index 985d1d4449..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - client.delete_backup_schedule(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py deleted file mode 100644 index 5f9d8c4e61..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_backup_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - client.delete_backup(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py deleted file mode 100644 index 9a1915405a..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py deleted file mode 100644 index de3b038b47..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_database_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py deleted file mode 100644 index 233b861c88..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - await client.delete_index(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py deleted file mode 100644 index 81b1ff6597..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_delete_index_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - client.delete_index(request=request) - - -# [END firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py deleted file mode 100644 index 32b8249283..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ExportDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ExportDocuments_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py deleted file mode 100644 index 4a63526654..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_export_documents_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py deleted file mode 100644 index a5133fa0aa..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetBackup_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py deleted file mode 100644 index b0f035cb89..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py deleted file mode 100644 index 2886304dbb..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py deleted file mode 100644 index 4aab97dac8..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_backup_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetBackup_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py deleted file mode 100644 index 7be034aced..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py deleted file mode 100644 index 697e7545c6..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py deleted file mode 100644 index 2613a67cbf..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = await client.get_field(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetField_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py deleted file mode 100644 index 0127a35473..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_field_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = client.get_field(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetField_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py deleted file mode 100644 index b04665e3eb..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = await client.get_index(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetIndex_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py deleted file mode 100644 index f7deed1839..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_get_index_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_GetIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = client.get_index(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_GetIndex_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py deleted file mode 100644 index 7eb3b81113..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ImportDocuments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ImportDocuments_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py deleted file mode 100644 index 64f43b25b3..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_import_documents_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportDocuments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py deleted file mode 100644 index 7f80d9fd96..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backup_schedules(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py deleted file mode 100644 index 825263f099..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backup_schedules(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py deleted file mode 100644 index b3f1934253..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListBackups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backups(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListBackups_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py deleted file mode 100644 index 1decda165b..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_backups_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListBackups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backups(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListBackups_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py deleted file mode 100644 index f27851537c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListDatabases_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_databases(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListDatabases_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py deleted file mode 100644 index b8fe32fa20..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_databases_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListDatabases_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_databases(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py deleted file mode 100644 index 43b341a27e..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFields -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListFields_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListFields_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py deleted file mode 100644 index 485967b786..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_fields_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFields -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListFields_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListFields_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py deleted file mode 100644 index 991fc2995b..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListIndexes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListIndexes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListIndexes_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py deleted file mode 100644 index 94bfa11971..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_list_indexes_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListIndexes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_ListIndexes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_ListIndexes_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py deleted file mode 100644 index 6102bf2c68..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py deleted file mode 100644 index 5eca0416ed..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_restore_database_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py deleted file mode 100644 index 7a6a5ab72c..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py deleted file mode 100644 index b6ae8ee2f2..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py deleted file mode 100644 index b9e089f16a..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py deleted file mode 100644 index 8913e0ddbb..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_database_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py deleted file mode 100644 index 98bc008dd5..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -async def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateField_async] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py deleted file mode 100644 index 100601d1c4..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/firestore_v1_generated_firestore_admin_update_field_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-firestore-admin - - -# [START firestore_v1_generated_FirestoreAdmin_UpdateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import firestore_admin_v1 - - -def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END firestore_v1_generated_FirestoreAdmin_UpdateField_sync] diff --git a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json b/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json deleted file mode 100644 index ce3c2bafb3..0000000000 --- a/owl-bot-staging/firestore_admin/v1/samples/generated_samples/snippet_metadata_google.firestore.admin.v1.json +++ /dev/null @@ -1,3740 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.firestore.admin.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-firestore-admin", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", 
- "type": "google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_schedule", - "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_create_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": 
"backup_schedule", - "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateBackupSchedule_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.firestore_admin_v1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "firestore_v1_generated_firestore_admin_create_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateDatabase_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.firestore_admin_v1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "firestore_v1_generated_firestore_admin_create_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateDatabase_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.create_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.CreateIndexRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "index", - "type": "google.cloud.firestore_admin_v1.types.Index" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_index" - }, - "description": "Sample for CreateIndex", - "file": 
"firestore_v1_generated_firestore_admin_create_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateIndex_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_index_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.create_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.CreateIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "CreateIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.CreateIndexRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "index", - "type": "google.cloud.firestore_admin_v1.types.Index" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_index" - }, - "description": "Sample for CreateIndex", - "file": "firestore_v1_generated_firestore_admin_create_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_CreateIndex_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - 
{ - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_create_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" 
- } - ], - "title": "firestore_v1_generated_firestore_admin_delete_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackupSchedule_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_backup", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackup", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "firestore_v1_generated_firestore_admin_delete_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackup_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_backup", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteBackup", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteBackupRequest" - }, 
- { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "firestore_v1_generated_firestore_admin_delete_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteBackup_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - 
"file": "firestore_v1_generated_firestore_admin_delete_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteDatabase_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - "file": "firestore_v1_generated_firestore_admin_delete_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteDatabase_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.delete_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteIndexRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_index" - }, - "description": "Sample for DeleteIndex", - "file": "firestore_v1_generated_firestore_admin_delete_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteIndex_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_index_async.py" - }, - { - "canonical": true, 
- "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.delete_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.DeleteIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "DeleteIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.DeleteIndexRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_index" - }, - "description": "Sample for DeleteIndex", - "file": "firestore_v1_generated_firestore_admin_delete_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_DeleteIndex_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_delete_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.export_documents", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ExportDocuments", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - 
"shortName": "FirestoreAdmin" - }, - "shortName": "ExportDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ExportDocumentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_documents" - }, - "description": "Sample for ExportDocuments", - "file": "firestore_v1_generated_firestore_admin_export_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ExportDocuments_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_export_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.export_documents", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ExportDocuments", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ExportDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ExportDocumentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - 
}, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_documents" - }, - "description": "Sample for ExportDocuments", - "file": "firestore_v1_generated_firestore_admin_export_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ExportDocuments_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_export_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for 
GetBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_get_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for GetBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackupSchedule_sync", - "segments": [ - { - "end": 51, - 
"start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_backup", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackup", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "firestore_v1_generated_firestore_admin_get_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, 
- "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_backup", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetBackup", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "firestore_v1_generated_firestore_admin_get_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetBackup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "firestore_v1_generated_firestore_admin_get_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.firestore_admin_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "firestore_v1_generated_firestore_admin_get_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_field", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetField", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.firestore_admin_v1.types.Field", - "shortName": "get_field" - }, - "description": "Sample for GetField", - "file": "firestore_v1_generated_firestore_admin_get_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetField_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_field", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetField", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Field", - "shortName": "get_field" - }, - "description": "Sample for GetField", - "file": "firestore_v1_generated_firestore_admin_get_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetField_sync", - "segments": [ - { - "end": 51, - 
"start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.get_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetIndexRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Index", - "shortName": "get_index" - }, - "description": "Sample for GetIndex", - "file": "firestore_v1_generated_firestore_admin_get_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetIndex_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_index_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.get_index", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.GetIndex", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "GetIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.GetIndexRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.Index", - "shortName": "get_index" - }, - "description": "Sample for GetIndex", - "file": "firestore_v1_generated_firestore_admin_get_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_GetIndex_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_get_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": 
"google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.import_documents", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ImportDocuments", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ImportDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ImportDocumentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_documents" - }, - "description": "Sample for ImportDocuments", - "file": "firestore_v1_generated_firestore_admin_import_documents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ImportDocuments_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_import_documents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.import_documents", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ImportDocuments", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": 
"ImportDocuments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ImportDocumentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_documents" - }, - "description": "Sample for ImportDocuments", - "file": "firestore_v1_generated_firestore_admin_import_documents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ImportDocuments_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_import_documents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_backup_schedules", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse", - "shortName": "list_backup_schedules" - }, - "description": "Sample for ListBackupSchedules", - "file": "firestore_v1_generated_firestore_admin_list_backup_schedules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_backup_schedules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_backup_schedules", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse", - "shortName": "list_backup_schedules" - 
}, - "description": "Sample for ListBackupSchedules", - "file": "firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackupSchedules_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_backup_schedules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_backups", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackups", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListBackupsResponse", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "firestore_v1_generated_firestore_admin_list_backups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackups_async", - "segments": 
[ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_backups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_backups", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListBackups", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListBackupsResponse", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "firestore_v1_generated_firestore_admin_list_backups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListBackups_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_backups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_databases", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListDatabases", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListDatabasesResponse", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "firestore_v1_generated_firestore_admin_list_databases_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListDatabases_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_databases_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", 
- "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_databases", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListDatabases", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.ListDatabasesResponse", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "firestore_v1_generated_firestore_admin_list_databases_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListDatabases_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_databases_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_fields", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListFields", - "service": { - "fullName": 
"google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListFields" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListFieldsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager", - "shortName": "list_fields" - }, - "description": "Sample for ListFields", - "file": "firestore_v1_generated_firestore_admin_list_fields_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListFields_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_fields_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_fields", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListFields", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListFields" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListFieldsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - 
"type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager", - "shortName": "list_fields" - }, - "description": "Sample for ListFields", - "file": "firestore_v1_generated_firestore_admin_list_fields_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListFields_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_fields_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.list_indexes", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListIndexes", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListIndexes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListIndexesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager", - 
"shortName": "list_indexes" - }, - "description": "Sample for ListIndexes", - "file": "firestore_v1_generated_firestore_admin_list_indexes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListIndexes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_indexes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.list_indexes", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.ListIndexes", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "ListIndexes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.ListIndexesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager", - "shortName": "list_indexes" - }, - "description": "Sample for ListIndexes", - "file": "firestore_v1_generated_firestore_admin_list_indexes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_ListIndexes_sync", - "segments": [ - { - 
"end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_list_indexes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.restore_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "firestore_v1_generated_firestore_admin_restore_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_RestoreDatabase_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { 
- "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_restore_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.restore_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "firestore_v1_generated_firestore_admin_restore_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_RestoreDatabase_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_restore_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - 
"fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_backup_schedule", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_update_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_backup_schedule", - 
"method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.firestore_admin_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.firestore_admin_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateBackupSchedule_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_database", - "method": { - "fullName": 
"google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.firestore_admin_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "firestore_v1_generated_firestore_admin_update_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateDatabase_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_database", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - 
"shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.firestore_admin_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "firestore_v1_generated_firestore_admin_update_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateDatabase_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient", - "shortName": "FirestoreAdminAsyncClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminAsyncClient.update_field", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateField", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "UpdateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateFieldRequest" - }, - { - "name": 
"field", - "type": "google.cloud.firestore_admin_v1.types.Field" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_field" - }, - "description": "Sample for UpdateField", - "file": "firestore_v1_generated_firestore_admin_update_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateField_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient", - "shortName": "FirestoreAdminClient" - }, - "fullName": "google.cloud.firestore_admin_v1.FirestoreAdminClient.update_field", - "method": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin.UpdateField", - "service": { - "fullName": "google.firestore.admin.v1.FirestoreAdmin", - "shortName": "FirestoreAdmin" - }, - "shortName": "UpdateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.firestore_admin_v1.types.UpdateFieldRequest" - }, - { - "name": "field", - "type": "google.cloud.firestore_admin_v1.types.Field" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.api_core.operation.Operation", - "shortName": "update_field" - }, - "description": "Sample for UpdateField", - "file": "firestore_v1_generated_firestore_admin_update_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "firestore_v1_generated_FirestoreAdmin_UpdateField_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "firestore_v1_generated_firestore_admin_update_field_sync.py" - } - ] -} diff --git a/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py b/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py deleted file mode 100644 index 6c34107827..0000000000 --- a/owl-bot-staging/firestore_admin/v1/scripts/fixup_firestore_admin_v1_keywords.py +++ /dev/null @@ -1,198 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestore_adminCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup_schedule': ('parent', 'backup_schedule', ), - 'create_database': ('parent', 'database', 'database_id', ), - 'create_index': ('parent', 'index', ), - 'delete_backup': ('name', ), - 'delete_backup_schedule': ('name', ), - 'delete_database': ('name', 'etag', ), - 'delete_index': ('name', ), - 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', 'namespace_ids', 'snapshot_time', ), - 'get_backup': ('name', ), - 'get_backup_schedule': ('name', ), - 'get_database': ('name', ), - 'get_field': ('name', ), - 'get_index': ('name', ), - 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', 'namespace_ids', ), - 'list_backups': ('parent', ), - 'list_backup_schedules': ('parent', ), - 'list_databases': ('parent', ), - 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), - 'restore_database': ('parent', 'database_id', 'backup', ), - 'update_backup_schedule': ('backup_schedule', 'update_mask', ), - 'update_database': ('database', 'update_mask', ), - 'update_field': ('field', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too 
convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestore_adminCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore_admin client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/firestore_admin/v1/setup.py 
b/owl-bot-staging/firestore_admin/v1/setup.py deleted file mode 100644 index 99daa39d68..0000000000 --- a/owl-bot-staging/firestore_admin/v1/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-firestore-admin' - - -description = "Google Cloud Firestore Admin API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/firestore_admin/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = 
"https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-firestore-admin" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.10.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt deleted file mode 100644 index b8a550c738..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.19.5 diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt b/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_admin/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_admin/v1/tests/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py deleted file mode 100644 index 6073726d67..0000000000 --- a/owl-bot-staging/firestore_admin/v1/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ /dev/null @@ -1,15150 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminAsyncClient -from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminClient -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.services.firestore_admin import transports -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin 
-from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FirestoreAdminClient._get_default_mtls_endpoint(None) is None - assert FirestoreAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert FirestoreAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert FirestoreAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert FirestoreAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert FirestoreAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert FirestoreAdminClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - FirestoreAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert FirestoreAdminClient._read_environment_variables() == (False, "never", None) - - with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert FirestoreAdminClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert FirestoreAdminClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert FirestoreAdminClient._get_client_cert_source(None, False) is None - assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert FirestoreAdminClient._get_client_cert_source(None, True) is mock_default_cert_source - assert FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) -@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - 
mock_client_cert_source = mock.Mock() - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert FirestoreAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "always") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert FirestoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert FirestoreAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert FirestoreAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert FirestoreAdminClient._get_universe_domain(None, None) == FirestoreAdminClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - FirestoreAdminClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize("client_class,transport_name", [ - (FirestoreAdminClient, "grpc"), - (FirestoreAdminAsyncClient, "grpc_asyncio"), - (FirestoreAdminClient, "rest"), -]) -def test_firestore_admin_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://firestore.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.FirestoreAdminGrpcTransport, "grpc"), - (transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.FirestoreAdminRestTransport, "rest"), -]) -def test_firestore_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - 
-@pytest.mark.parametrize("client_class,transport_name", [ - (FirestoreAdminClient, "grpc"), - (FirestoreAdminAsyncClient, "grpc_asyncio"), - (FirestoreAdminClient, "rest"), -]) -def test_firestore_admin_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://firestore.googleapis.com' - ) - - -def test_firestore_admin_client_get_transport_class(): - transport = FirestoreAdminClient.get_transport_class() - available_transports = [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminRestTransport, - ] - assert transport in available_transports - - transport = FirestoreAdminClient.get_transport_class("grpc") - assert transport == transports.FirestoreAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), -]) -@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) -@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) -def 
test_firestore_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(FirestoreAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FirestoreAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", "true"), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", 
"false"), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "true"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "false"), -]) -@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) -@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_firestore_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - FirestoreAdminClient, FirestoreAdminAsyncClient -]) -@mock.patch.object(FirestoreAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAdminClient)) -@mock.patch.object(FirestoreAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreAdminAsyncClient)) -def test_firestore_admin_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - FirestoreAdminClient, FirestoreAdminAsyncClient -]) -@mock.patch.object(FirestoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminClient)) -@mock.patch.object(FirestoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(FirestoreAdminAsyncClient)) -def test_firestore_admin_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe 
= "bar.com" - mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), -]) -def test_firestore_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", grpc_helpers), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", None), -]) -def test_firestore_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_firestore_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = FirestoreAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", grpc_helpers), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_firestore_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=None, - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateIndexRequest, - dict, -]) -def test_create_index(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the 
runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() - - -def test_create_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore_admin.CreateIndexRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - client.create_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_create_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() - -@pytest.mark.asyncio -async def test_create_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateIndexRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_index_async_from_dict(): - await test_create_index_async(request_type=dict) - - -def test_create_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateIndexRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateIndexRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_index( - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].index - mock_val = gfa_index.Index(name='name_value') - assert arg == mock_val - - -def test_create_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_index( - firestore_admin.CreateIndexRequest(), - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_index( - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].index - mock_val = gfa_index.Index(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_index( - firestore_admin.CreateIndexRequest(), - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListIndexesRequest, - dict, -]) -def test_list_indexes(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListIndexesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_indexes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() - - -def test_list_indexes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListIndexesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - client.list_indexes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -@pytest.mark.asyncio -async def test_list_indexes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_indexes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() - -@pytest.mark.asyncio -async def test_list_indexes_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListIndexesRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListIndexesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_indexes_async_from_dict(): - await test_list_indexes_async(request_type=dict) - - -def test_list_indexes_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ListIndexesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - call.return_value = firestore_admin.ListIndexesResponse() - client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_indexes_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListIndexesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse()) - await client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_indexes_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_indexes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_indexes_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_indexes_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListIndexesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_indexes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_indexes_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent='parent_value', - ) - - -def test_list_indexes_pager(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_indexes(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) - for i in results) -def test_list_indexes_pages(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_indexes), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - pages = list(client.list_indexes(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_indexes_async_pager(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_indexes(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, index.Index) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_indexes_async_pages(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_indexes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetIndexRequest, - dict, -]) -def test_get_index(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index( - name='name_value', - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - ) - response = client.get_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == 'name_value' - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - - -def test_get_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() - - -def test_get_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetIndexRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - client.get_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index( - name='name_value', - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - )) - response = await client.get_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() - -@pytest.mark.asyncio -async def test_get_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetIndexRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(index.Index( - name='name_value', - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - )) - response = await client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == 'name_value' - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - - -@pytest.mark.asyncio -async def test_get_index_async_from_dict(): - await test_get_index_async(request_type=dict) - - -def test_get_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetIndexRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - call.return_value = index.Index() - client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetIndexRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) - await client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_index( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_index( - firestore_admin.GetIndexRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_index( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_index( - firestore_admin.GetIndexRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteIndexRequest, - dict, -]) -def test_delete_index(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_index_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() - - -def test_delete_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteIndexRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - client.delete_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_index_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_index() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() - -@pytest.mark.asyncio -async def test_delete_index_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteIndexRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_index_async_from_dict(): - await test_delete_index_async(request_type=dict) - - -def test_delete_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteIndexRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - call.return_value = None - client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteIndexRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_index( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_index( - firestore_admin.DeleteIndexRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_index), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_index( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_index( - firestore_admin.DeleteIndexRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetFieldRequest, - dict, -]) -def test_get_field(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field( - name='name_value', - ) - response = client.get_field(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, field.Field) - assert response.name == 'name_value' - - -def test_get_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - client.get_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() - - -def test_get_field_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetFieldRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - client.get_field(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_field_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field( - name='name_value', - )) - response = await client.get_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() - -@pytest.mark.asyncio -async def test_get_field_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetFieldRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(field.Field( - name='name_value', - )) - response = await client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, field.Field) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_field_async_from_dict(): - await test_get_field_async(request_type=dict) - - -def test_get_field_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - call.return_value = field.Field() - client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_field_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) - await client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_field_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_field( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_field_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_field( - firestore_admin.GetFieldRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_field_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_field( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_field_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_field( - firestore_admin.GetFieldRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateFieldRequest, - dict, -]) -def test_update_field(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - client.update_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() - - -def test_update_field_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.UpdateFieldRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - client.update_field(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest( - ) - -@pytest.mark.asyncio -async def test_update_field_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() - -@pytest.mark.asyncio -async def test_update_field_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateFieldRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_field_async_from_dict(): - await test_update_field_async(request_type=dict) - - -def test_update_field_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateFieldRequest() - - request.field.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'field.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_field_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateFieldRequest() - - request.field.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'field.name=name_value', - ) in kw['metadata'] - - -def test_update_field_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_field( - field=gfa_field.Field(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].field - mock_val = gfa_field.Field(name='name_value') - assert arg == mock_val - - -def test_update_field_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_field_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_field( - field=gfa_field.Field(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].field - mock_val = gfa_field.Field(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_field_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListFieldsRequest, - dict, -]) -def test_list_fields(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListFieldsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListFieldsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_fields_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - client.list_fields() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() - - -def test_list_fields_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListFieldsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - client.list_fields(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -@pytest.mark.asyncio -async def test_list_fields_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_fields() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() - -@pytest.mark.asyncio -async def test_list_fields_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListFieldsRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListFieldsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_fields_async_from_dict(): - await test_list_fields_async(request_type=dict) - - -def test_list_fields_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ListFieldsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - call.return_value = firestore_admin.ListFieldsResponse() - client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_fields_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListFieldsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse()) - await client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_fields_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = firestore_admin.ListFieldsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_fields( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_fields_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_fields( - firestore_admin.ListFieldsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_fields_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListFieldsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListFieldsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_fields( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_fields_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_fields( - firestore_admin.ListFieldsRequest(), - parent='parent_value', - ) - - -def test_list_fields_pager(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token='abc', - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token='def', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token='ghi', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_fields(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, field.Field) - for i in results) -def test_list_fields_pages(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_fields), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token='abc', - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token='def', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token='ghi', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - pages = list(client.list_fields(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_fields_async_pager(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token='abc', - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token='def', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token='ghi', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_fields(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, field.Field) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_fields_async_pages(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token='abc', - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token='def', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token='ghi', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_fields(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ExportDocumentsRequest, - dict, -]) -def test_export_documents(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ExportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_export_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - client.export_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() - - -def test_export_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ExportDocumentsRequest( - name='name_value', - output_uri_prefix='output_uri_prefix_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - client.export_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest( - name='name_value', - output_uri_prefix='output_uri_prefix_value', - ) - -@pytest.mark.asyncio -async def test_export_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() - -@pytest.mark.asyncio -async def test_export_documents_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ExportDocumentsRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ExportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_documents_async_from_dict(): - await test_export_documents_async(request_type=dict) - - -def test_export_documents_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ExportDocumentsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_documents_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ExportDocumentsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_documents_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_documents( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_export_documents_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_export_documents_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_documents), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.export_documents( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_export_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ImportDocumentsRequest, - dict, -]) -def test_import_documents(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ImportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_documents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - client.import_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() - - -def test_import_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ImportDocumentsRequest( - name='name_value', - input_uri_prefix='input_uri_prefix_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - client.import_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest( - name='name_value', - input_uri_prefix='input_uri_prefix_value', - ) - -@pytest.mark.asyncio -async def test_import_documents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_documents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() - -@pytest.mark.asyncio -async def test_import_documents_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ImportDocumentsRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ImportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_documents_async_from_dict(): - await test_import_documents_async(request_type=dict) - - -def test_import_documents_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ImportDocumentsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_documents_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ImportDocumentsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_import_documents_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.import_documents( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_import_documents_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_import_documents_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_documents), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.import_documents( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_import_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateDatabaseRequest, - dict, -]) -def test_create_database(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest() - - -def test_create_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_create_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest() - -@pytest.mark.asyncio -async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateDatabaseRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - - -def test_create_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = gfa_database.Database(name='name_value') - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - - -def test_create_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_database( - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = gfa_database.Database(name='name_value') - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetDatabaseRequest, - dict, -]) -def test_get_database(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = database.Database( - name='name_value', - uid='uid_value', - location_id='location_id_value', - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix='key_prefix_value', - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - etag='etag_value', - ) - response = client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, database.Database) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.location_id == 'location_id_value' - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED - assert response.key_prefix == 'key_prefix_value' - assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - assert response.etag == 'etag_value' - - -def test_get_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest() - - -def test_get_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetDatabaseRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database( - name='name_value', - uid='uid_value', - location_id='location_id_value', - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix='key_prefix_value', - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - etag='etag_value', - )) - response = await client.get_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest() - -@pytest.mark.asyncio -async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetDatabaseRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(database.Database( - name='name_value', - uid='uid_value', - location_id='location_id_value', - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix='key_prefix_value', - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - etag='etag_value', - )) - response = await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, database.Database) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.location_id == 'location_id_value' - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED - assert response.key_prefix == 'key_prefix_value' - assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - - -def test_get_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is 
part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = database.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = database.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_database( - firestore_admin.GetDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = database.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database( - firestore_admin.GetDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListDatabasesRequest, - dict, -]) -def test_list_databases(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse( - unreachable=['unreachable_value'], - ) - response = client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ['unreachable_value'] - - -def test_list_databases_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest() - - -def test_list_databases_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListDatabasesRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_databases_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse( - unreachable=['unreachable_value'], - )) - response = await client.list_databases() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest() - -@pytest.mark.asyncio -async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListDatabasesRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse( - unreachable=['unreachable_value'], - )) - response = await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - - -def test_list_databases_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = firestore_admin.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse()) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_databases_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_databases_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_databases_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListDatabasesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest() - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.UpdateDatabaseRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest( - ) - -@pytest.mark.asyncio -async def test_update_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest() - -@pytest.mark.asyncio -async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateDatabaseRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - - -def test_update_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -def test_update_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = gfa_database.Database(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_database( - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = gfa_database.Database(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteDatabaseRequest, - dict, -]) -def test_delete_database(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - client.delete_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest() - - -def test_delete_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteDatabaseRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - client.delete_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest( - name='name_value', - etag='etag_value', - ) - -@pytest.mark.asyncio -async def test_delete_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest() - -@pytest.mark.asyncio -async def test_delete_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteDatabaseRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_database_async_from_dict(): - await test_delete_database_async(request_type=dict) - - -def test_delete_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetBackupRequest, - dict, -]) -def test_get_backup(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup( - name='name_value', - database='database_value', - database_uid='database_uid_value', - state=backup.Backup.State.CREATING, - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup.Backup) - assert response.name == 'name_value' - assert response.database == 'database_value' - assert response.database_uid == 'database_uid_value' - assert response.state == backup.Backup.State.CREATING - - -def test_get_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest() - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( - name='name_value', - database='database_value', - database_uid='database_uid_value', - state=backup.Backup.State.CREATING, - )) - response = await client.get_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest() - -@pytest.mark.asyncio -async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetBackupRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( - name='name_value', - database='database_value', - database_uid='database_uid_value', - state=backup.Backup.State.CREATING, - )) - response = await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup.Backup) - assert response.name == 'name_value' - assert response.database == 'database_value' - assert response.database_uid == 'database_uid_value' - assert response.state == backup.Backup.State.CREATING - - -@pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) - - -def test_get_backup_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_backup_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - firestore_admin.GetBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup( - firestore_admin.GetBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListBackupsRequest, - dict, -]) -def test_list_backups(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupsResponse( - unreachable=['unreachable_value'], - ) - response = client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ['unreachable_value'] - - -def test_list_backups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest() - - -def test_list_backups_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListBackupsRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backups_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse( - unreachable=['unreachable_value'], - )) - response = await client.list_backups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest() - -@pytest.mark.asyncio -async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListBackupsRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse( - unreachable=['unreachable_value'], - )) - response = await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) - - -def test_list_backups_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = firestore_admin.ListBackupsResponse() - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backups_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse()) - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_backups_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_backups_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - firestore_admin.ListBackupsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backups_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = firestore_admin.ListBackupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backups( - firestore_admin.ListBackupsRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteBackupRequest, - dict, -]) -def test_delete_backup(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest() - - -def test_delete_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest() - -@pytest.mark.asyncio -async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteBackupRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - - -def test_delete_backup_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_backup_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.RestoreDatabaseRequest, - dict, -]) -def test_restore_database(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - client.restore_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest() - - -def test_restore_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - client.restore_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - -@pytest.mark.asyncio -async def test_restore_database_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest() - -@pytest.mark.asyncio -async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.RestoreDatabaseRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_database_async_from_dict(): - await test_restore_database_async(request_type=dict) - - -def test_restore_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name='name_value', - ) - response = client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_create_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - client.create_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest() - - -def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore_admin.CreateBackupScheduleRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - client.create_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_create_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.create_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest() - -@pytest.mark.asyncio -async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.CreateBackupScheduleRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_backup_schedule_async_from_dict(): - await test_create_backup_schedule_async(request_type=dict) - - -def test_create_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateBackupScheduleRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value = schedule.BackupSchedule() - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.CreateBackupScheduleRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup_schedule( - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name='name_value') - assert arg == mock_val - - -def test_create_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_backup_schedule( - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetBackupScheduleRequest, - dict, -]) -def test_get_backup_schedule(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name='name_value', - ) - response = client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_get_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - client.get_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest() - - -def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - client.get_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.get_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest() - -@pytest.mark.asyncio -async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.GetBackupScheduleRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_backup_schedule_async_from_dict(): - await test_get_backup_schedule_async(request_type=dict) - - -def test_get_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.GetBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value = schedule.BackupSchedule() - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - await client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup_schedule( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup_schedule( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse( - ) - response = client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -def test_list_backup_schedules_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - client.list_backup_schedules() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest() - - -def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListBackupSchedulesRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - client.list_backup_schedules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest( - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backup_schedules_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse( - )) - response = await client.list_backup_schedules() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest() - -@pytest.mark.asyncio -async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.ListBackupSchedulesRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse( - )) - response = await client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_from_dict(): - await test_list_backup_schedules_async(request_type=dict) - - -def test_list_backup_schedules_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ListBackupSchedulesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value = firestore_admin.ListBackupSchedulesResponse() - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backup_schedules_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupSchedulesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse()) - await client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_backup_schedules_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backup_schedules( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_backup_schedules_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(firestore_admin.ListBackupSchedulesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backup_schedules( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name='name_value', - ) - response = client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_update_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - client.update_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest() - - -def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.UpdateBackupScheduleRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - client.update_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest( - ) - -@pytest.mark.asyncio -async def test_update_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.update_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest() - -@pytest.mark.asyncio -async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.UpdateBackupScheduleRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule( - name='name_value', - )) - response = await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_backup_schedule_async_from_dict(): - await test_update_backup_schedule_async(request_type=dict) - - -def test_update_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.UpdateBackupScheduleRequest() - - request.backup_schedule.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value = schedule.BackupSchedule() - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup_schedule.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateBackupScheduleRequest() - - request.backup_schedule.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup_schedule.name=name_value', - ) in kw['metadata'] - - -def test_update_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup_schedule( - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.BackupSchedule()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_backup_schedule( - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteBackupScheduleRequest, - dict, -]) -def test_delete_backup_schedule(request_type, transport: str = 'grpc'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_schedule_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - client.delete_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest() - - -def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - client.delete_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest( - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_schedule_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest() - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=firestore_admin.DeleteBackupScheduleRequest): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_from_dict(): - await test_delete_backup_schedule_async(request_type=dict) - - -def test_delete_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = None - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup_schedule( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup_schedule( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateIndexRequest, - dict, -]) -def test_create_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request_init["index"] = {'name': 'name_value', 'query_scope': 1, 'api_scope': 1, 'fields': [{'field_path': 'field_path_value', 'order': 1, 'array_config': 1, 'vector_config': {'dimension': 966, 'flat': {}}}], 'state': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateIndexRequest.meta.fields["index"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["index"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["index"][field])): - del request_init["index"][field][i][subfield] - else: - del request_init["index"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_index(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_index_rest_required_fields(request_type=firestore_admin.CreateIndexRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_index(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_index._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "index", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_index") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_index") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateIndexRequest.pb(firestore_admin.CreateIndexRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.CreateIndexRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_index(request) - - -def test_create_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" % client.transport._host, args[1]) - - -def test_create_index_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_index( - firestore_admin.CreateIndexRequest(), - parent='parent_value', - index=gfa_index.Index(name='name_value'), - ) - - -def test_create_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListIndexesRequest, - dict, -]) -def test_list_indexes_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListIndexesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_indexes(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_indexes_rest_required_fields(request_type=firestore_admin.ListIndexesRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_indexes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_indexes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListIndexesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_indexes(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_indexes_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_indexes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_indexes_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_indexes") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_indexes") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = firestore_admin.ListIndexesRequest.pb(firestore_admin.ListIndexesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListIndexesResponse.to_json(firestore_admin.ListIndexesResponse()) - - request = firestore_admin.ListIndexesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListIndexesResponse() - - client.list_indexes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_indexes_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListIndexesRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_indexes(request) - - -def test_list_indexes_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListIndexesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_indexes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" % client.transport._host, args[1]) - - -def test_list_indexes_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent='parent_value', - ) - - -def test_list_indexes_rest_pager(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(firestore_admin.ListIndexesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - - pager = client.list_indexes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) - for i in results) - - pages = list(client.list_indexes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetIndexRequest, - dict, -]) -def test_get_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = index.Index( - name='name_value', - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_index(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == 'name_value' - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - - -def test_get_index_rest_required_fields(request_type=firestore_admin.GetIndexRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # 
verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = index.Index() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_index(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_index._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_index_rest_interceptors(null_interceptor): - 
transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_index") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_index") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetIndexRequest.pb(firestore_admin.GetIndexRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = index.Index.to_json(index.Index()) - - request = firestore_admin.GetIndexRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = index.Index() - - client.get_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_index(request) - - -def test_get_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = index.Index() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" % client.transport._host, args[1]) - - -def test_get_index_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_index( - firestore_admin.GetIndexRequest(), - name='name_value', - ) - - -def test_get_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteIndexRequest, - dict, -]) -def test_delete_index_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_index(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_index_rest_required_fields(request_type=firestore_admin.DeleteIndexRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_index(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_index._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_index") as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteIndexRequest.pb(firestore_admin.DeleteIndexRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore_admin.DeleteIndexRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.delete_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_index_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_index(request) - - -def test_delete_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" % client.transport._host, args[1]) - - -def test_delete_index_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_index( - firestore_admin.DeleteIndexRequest(), - name='name_value', - ) - - -def test_delete_index_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetFieldRequest, - dict, -]) -def test_get_field_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = field.Field( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_field(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, field.Field) - assert response.name == 'name_value' - - -def test_get_field_rest_required_fields(request_type=firestore_admin.GetFieldRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = field.Field() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_field(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_field_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_field._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_field") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_field") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetFieldRequest.pb(firestore_admin.GetFieldRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = field.Field.to_json(field.Field()) - - request = firestore_admin.GetFieldRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = field.Field() - - client.get_field(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_field_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetFieldRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_field(request) - - -def test_get_field_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = field.Field() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_field(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}" % client.transport._host, args[1]) - - -def test_get_field_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_field( - firestore_admin.GetFieldRequest(), - name='name_value', - ) - - -def test_get_field_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateFieldRequest, - dict, -]) -def test_update_field_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} - request_init["field"] = {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4', 'index_config': {'indexes': [{'name': 'name_value', 'query_scope': 1, 'api_scope': 1, 'fields': [{'field_path': 'field_path_value', 'order': 1, 'array_config': 1, 'vector_config': {'dimension': 966, 'flat': {}}}], 'state': 1}], 'uses_ancestor_config': True, 'ancestor_field': 'ancestor_field_value', 'reverting': True}, 'ttl_config': {'state': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateFieldRequest.meta.fields["field"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["field"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["field"][field])): - del request_init["field"][field][i][subfield] - else: - del 
request_init["field"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_field(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_update_field_rest_required_fields(request_type=firestore_admin.UpdateFieldRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_field._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_field(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_field_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_field._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("field", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_update_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_field") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_field") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateFieldRequest.pb(firestore_admin.UpdateFieldRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.UpdateFieldRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_field(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_field_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateFieldRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} - request = request_type(**request_init) - - # Mock the http request call 
within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_field(request) - - -def test_update_field_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'field': {'name': 'projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - field=gfa_field.Field(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_field(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}" % client.transport._host, args[1]) - - -def test_update_field_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name='name_value'), - ) - - -def test_update_field_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListFieldsRequest, - dict, -]) -def test_list_fields_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListFieldsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_fields(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_fields_rest_required_fields(request_type=firestore_admin.ListFieldsRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_fields._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_fields._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListFieldsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_fields(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_fields_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_fields._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_fields_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_fields") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_fields") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListFieldsRequest.pb(firestore_admin.ListFieldsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListFieldsResponse.to_json(firestore_admin.ListFieldsResponse()) - - request = firestore_admin.ListFieldsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListFieldsResponse() - - client.list_fields(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_fields_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListFieldsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_fields(request) - - -def test_list_fields_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListFieldsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_fields(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields" % client.transport._host, args[1]) - - -def test_list_fields_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_fields( - firestore_admin.ListFieldsRequest(), - parent='parent_value', - ) - - -def test_list_fields_rest_pager(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token='abc', - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token='def', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token='ghi', - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(firestore_admin.ListFieldsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/databases/sample2/collectionGroups/sample3'} - - pager = client.list_fields(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, field.Field) - for i in results) - - pages = list(client.list_fields(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ExportDocumentsRequest, - dict, -]) -def test_export_documents_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.export_documents(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_export_documents_rest_required_fields(request_type=firestore_admin.ExportDocumentsRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.export_documents(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_documents_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_export_documents") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_export_documents") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ExportDocumentsRequest.pb(firestore_admin.ExportDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.ExportDocumentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_export_documents_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ExportDocumentsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_documents(request) - - -def test_export_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.export_documents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*}:exportDocuments" % client.transport._host, args[1]) - - -def test_export_documents_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name='name_value', - ) - - -def test_export_documents_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ImportDocumentsRequest, - dict, -]) -def test_import_documents_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.import_documents(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_import_documents_rest_required_fields(request_type=firestore_admin.ImportDocumentsRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.import_documents(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_import_documents_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_import_documents") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_import_documents") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ImportDocumentsRequest.pb(firestore_admin.ImportDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.ImportDocumentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.import_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_import_documents_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ImportDocumentsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_documents(request) - - -def test_import_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.import_documents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*}:importDocuments" % client.transport._host, args[1]) - - -def test_import_documents_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name='name_value', - ) - - -def test_import_documents_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateDatabaseRequest, - dict, -]) -def test_create_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request_init["database"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'location_id': 'location_id_value', 'type_': 1, 'concurrency_mode': 1, 'version_retention_period': {'seconds': 751, 'nanos': 543}, 'earliest_version_time': {}, 'point_in_time_recovery_enablement': 1, 'app_engine_integration_mode': 1, 'key_prefix': 'key_prefix_value', 'delete_protection_state': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del 
request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_database(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_database_rest_required_fields(request_type=firestore_admin.CreateDatabaseRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "databaseId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == request_init["database_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set(("database_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_database(request) - - expected_params = [ - ( - "databaseId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("databaseId", )) & set(("parent", "database", "databaseId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_database") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_database") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateDatabaseRequest.pb(firestore_admin.CreateDatabaseRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.CreateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_database(request) - - -def test_create_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1]) - - -def test_create_database_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent='parent_value', - database=gfa_database.Database(name='name_value'), - database_id='database_id_value', - ) - - -def test_create_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetDatabaseRequest, - dict, -]) -def test_get_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = database.Database( - name='name_value', - uid='uid_value', - location_id='location_id_value', - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix='key_prefix_value', - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_database(request) - - # 
Establish that the response is the type that we expect. - assert isinstance(response, database.Database) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.location_id == 'location_id_value' - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert response.point_in_time_recovery_enablement == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - assert response.app_engine_integration_mode == database.Database.AppEngineIntegrationMode.ENABLED - assert response.key_prefix == 'key_prefix_value' - assert response.delete_protection_state == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - assert response.etag == 'etag_value' - - -def test_get_database_rest_required_fields(request_type=firestore_admin.GetDatabaseRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = database.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_database") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_database") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetDatabaseRequest.pb(firestore_admin.GetDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = database.Database.to_json(database.Database()) - - request = firestore_admin.GetDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = database.Database() - - client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_database(request) - - -def test_get_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = database.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1]) - - -def test_get_database_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - firestore_admin.GetDatabaseRequest(), - name='name_value', - ) - - -def test_get_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListDatabasesRequest, - dict, -]) -def test_list_databases_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListDatabasesResponse( - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ['unreachable_value'] - - -def test_list_databases_rest_required_fields(request_type=firestore_admin.ListDatabasesRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_databases(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_databases_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_databases") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_databases") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListDatabasesRequest.pb(firestore_admin.ListDatabasesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": 
pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListDatabasesResponse.to_json(firestore_admin.ListDatabasesResponse()) - - request = firestore_admin.ListDatabasesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListDatabasesResponse() - - client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_databases_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListDatabasesRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_databases(request) - - -def test_list_databases_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListDatabasesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_databases(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1]) - - -def test_list_databases_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent='parent_value', - ) - - -def test_list_databases_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/databases/sample2'}} - request_init["database"] = {'name': 'projects/sample1/databases/sample2', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'location_id': 'location_id_value', 'type_': 1, 'concurrency_mode': 1, 'version_retention_period': {'seconds': 751, 'nanos': 543}, 'earliest_version_time': {}, 'point_in_time_recovery_enablement': 1, 'app_engine_integration_mode': 1, 'key_prefix': 'key_prefix_value', 'delete_protection_state': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del 
request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_database(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_update_database_rest_required_fields(request_type=firestore_admin.UpdateDatabaseRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("database", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_update_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_database") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_database") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateDatabaseRequest.pb(firestore_admin.UpdateDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.UpdateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/databases/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_database(request) - - -def test_update_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'database': {'name': 'projects/sample1/databases/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database.name=projects/*/databases/*}" % client.transport._host, args[1]) - - -def test_update_database_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteDatabaseRequest, - dict, -]) -def test_delete_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_database(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_database_rest_required_fields(request_type=firestore_admin.DeleteDatabaseRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_delete_database") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_database") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
firestore_admin.DeleteDatabaseRequest.pb(firestore_admin.DeleteDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.DeleteDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_database(request) - - -def test_delete_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1]) - - -def test_delete_database_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name='name_value', - ) - - -def test_delete_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetBackupRequest, - dict, -]) -def test_get_backup_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup( - name='name_value', - database='database_value', - database_uid='database_uid_value', - state=backup.Backup.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_backup(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.name == 'name_value' - assert response.database == 'database_value' - assert response.database_uid == 'database_uid_value' - assert response.state == backup.Backup.State.CREATING - - -def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left 
alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_backup") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetBackupRequest.pb(firestore_admin.GetBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = backup.Backup.to_json(backup.Backup()) - - request = firestore_admin.GetBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.Backup() - - client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_backup_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetBackupRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_backup(request) - - -def test_get_backup_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1]) - - -def test_get_backup_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup( - firestore_admin.GetBackupRequest(), - name='name_value', - ) - - -def test_get_backup_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListBackupsRequest, - dict, -]) -def test_list_backups_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse( - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_backups(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ['unreachable_value'] - - -def test_list_backups_rest_required_fields(request_type=firestore_admin.ListBackupsRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_backups(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backups_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_backups") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_backups") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListBackupsRequest.pb(firestore_admin.ListBackupsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListBackupsResponse.to_json(firestore_admin.ListBackupsResponse()) - - request = firestore_admin.ListBackupsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupsResponse() - - client.list_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_backups_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListBackupsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_backups(request) - - -def test_list_backups_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListBackupsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_backups(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, args[1]) - - -def test_list_backups_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_backups( - firestore_admin.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteBackupRequest, - dict, -]) -def test_delete_backup_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_backup(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_backup_rest_required_fields(request_type=firestore_admin.DeleteBackupRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_backup") as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupRequest.pb(firestore_admin.DeleteBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore_admin.DeleteBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, 
metadata - - client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_backup_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteBackupRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_backup(request) - - -def test_delete_backup_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1]) - - -def test_delete_backup_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name='name_value', - ) - - -def test_delete_backup_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.RestoreDatabaseRequest, - dict, -]) -def test_restore_database_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.restore_database(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_restore_database_rest_required_fields(request_type=firestore_admin.RestoreDatabaseRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request_init["backup"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - jsonified_request["backup"] = 'backup_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - assert "backup" in jsonified_request - assert jsonified_request["backup"] == 'backup_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.restore_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restore_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "databaseId", "backup", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - 
mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_restore_database") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_restore_database") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.RestoreDatabaseRequest.pb(firestore_admin.RestoreDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = firestore_admin.RestoreDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_restore_database_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.RestoreDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.restore_database(request) - - -def test_restore_database_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2'} - request_init["backup_schedule"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention': {'seconds': 751, 'nanos': 543}, 'daily_recurrence': {}, 'weekly_recurrence': {'day': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del 
request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_create_backup_schedule_rest_required_fields(request_type=firestore_admin.CreateBackupScheduleRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - 
assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "backupSchedule", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_create_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.CreateBackupScheduleRequest.pb(firestore_admin.CreateBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - - request = firestore_admin.CreateBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - - client.create_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.CreateBackupScheduleRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a 
BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_backup_schedule(request) - - -def test_create_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/databases/*}/backupSchedules" % client.transport._host, args[1]) - - -def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent='parent_value', - backup_schedule=schedule.BackupSchedule(name='name_value'), - ) - - -def test_create_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.GetBackupScheduleRequest, - dict, -]) -def test_get_backup_schedule_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_get_backup_schedule_rest_required_fields(request_type=firestore_admin.GetBackupScheduleRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.GetBackupScheduleRequest.pb(firestore_admin.GetBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - - request = firestore_admin.GetBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - - client.get_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.GetBackupScheduleRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_backup_schedule(request) - - -def test_get_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name='name_value', - ) - - -def test_get_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_backup_schedules(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -def test_list_backup_schedules_rest_required_fields(request_type=firestore_admin.ListBackupSchedulesRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_backup_schedules(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_schedules_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_schedules_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.ListBackupSchedulesRequest.pb(firestore_admin.ListBackupSchedulesRequest()) - transcode.return_value = { - "method": "post", 
- "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = firestore_admin.ListBackupSchedulesResponse.to_json(firestore_admin.ListBackupSchedulesResponse()) - - request = firestore_admin.ListBackupSchedulesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupSchedulesResponse() - - client.list_backup_schedules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_backup_schedules_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.ListBackupSchedulesRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_backup_schedules(request) - - -def test_list_backup_schedules_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListBackupSchedulesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/databases/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_backup_schedules(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/databases/*}/backupSchedules" % client.transport._host, args[1]) - - -def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent='parent_value', - ) - - -def test_list_backup_schedules_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'}} - request_init["backup_schedule"] = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention': {'seconds': 751, 'nanos': 543}, 'daily_recurrence': {}, 'weekly_recurrence': {'day': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del 
request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_update_backup_schedule_rest_required_fields(request_type=firestore_admin.UpdateBackupScheduleRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & 
set(("backupSchedule", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule") as post, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = firestore_admin.UpdateBackupScheduleRequest.pb(firestore_admin.UpdateBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - - request = firestore_admin.UpdateBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - - client.update_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.UpdateBackupScheduleRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 
'projects/sample1/databases/sample2/backupSchedules/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_backup_schedule(request) - - -def test_update_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'backup_schedule': {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - firestore_admin.DeleteBackupScheduleRequest, - dict, -]) -def test_delete_backup_schedule_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_backup_schedule_rest_required_fields(request_type=firestore_admin.DeleteBackupScheduleRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FirestoreAdminRestInterceptor, "pre_delete_backup_schedule") as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupScheduleRequest.pb(firestore_admin.DeleteBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = firestore_admin.DeleteBackupScheduleRequest() - metadata =[ - ("key", 
"val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_backup_schedule_rest_bad_request(transport: str = 'rest', request_type=firestore_admin.DeleteBackupScheduleRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_backup_schedule(request) - - -def test_delete_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/databases/sample2/backupSchedules/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name='name_value', - ) - - -def test_delete_backup_schedule_rest_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = FirestoreAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FirestoreAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - transports.FirestoreAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = FirestoreAdminClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.FirestoreAdminGrpcTransport, - ) - -def test_firestore_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.FirestoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_firestore_admin_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.FirestoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_index', - 'list_indexes', - 'get_index', - 'delete_index', - 'get_field', - 'update_field', - 'list_fields', - 'export_documents', - 'import_documents', - 'create_database', - 'get_database', - 'list_databases', - 'update_database', - 'delete_database', - 'get_backup', - 'list_backups', - 'delete_backup', - 'restore_database', - 'create_backup_schedule', - 'get_backup_schedule', - 'list_backup_schedules', - 'update_backup_schedule', - 'delete_backup_schedule', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_firestore_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreAdminTransport( - 
credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id="octopus", - ) - - -def test_firestore_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreAdminTransport() - adc.assert_called_once() - - -def test_firestore_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirestoreAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - ], -) -def test_firestore_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - transports.FirestoreAdminRestTransport, - ], -) -def test_firestore_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.FirestoreAdminGrpcTransport, grpc_helpers), - (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_firestore_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=["1", "2"], - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) -def test_firestore_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_firestore_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.FirestoreAdminRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_firestore_admin_rest_lro_client(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_firestore_admin_host_no_port(transport_name): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'firestore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://firestore.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_firestore_admin_host_with_port(transport_name): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='firestore.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'firestore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://firestore.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_firestore_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = FirestoreAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = FirestoreAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_index._session - session2 = client2.transport.create_index._session - assert session1 != session2 - session1 = client1.transport.list_indexes._session - session2 = client2.transport.list_indexes._session - assert session1 != session2 - session1 = client1.transport.get_index._session - session2 = client2.transport.get_index._session - assert session1 != session2 - session1 = client1.transport.delete_index._session - session2 = 
client2.transport.delete_index._session - assert session1 != session2 - session1 = client1.transport.get_field._session - session2 = client2.transport.get_field._session - assert session1 != session2 - session1 = client1.transport.update_field._session - session2 = client2.transport.update_field._session - assert session1 != session2 - session1 = client1.transport.list_fields._session - session2 = client2.transport.list_fields._session - assert session1 != session2 - session1 = client1.transport.export_documents._session - session2 = client2.transport.export_documents._session - assert session1 != session2 - session1 = client1.transport.import_documents._session - session2 = client2.transport.import_documents._session - assert session1 != session2 - session1 = client1.transport.create_database._session - session2 = client2.transport.create_database._session - assert session1 != session2 - session1 = client1.transport.get_database._session - session2 = client2.transport.get_database._session - assert session1 != session2 - session1 = client1.transport.list_databases._session - session2 = client2.transport.list_databases._session - assert session1 != session2 - session1 = client1.transport.update_database._session - session2 = client2.transport.update_database._session - assert session1 != session2 - session1 = client1.transport.delete_database._session - session2 = client2.transport.delete_database._session - assert session1 != session2 - session1 = client1.transport.get_backup._session - session2 = client2.transport.get_backup._session - assert session1 != session2 - session1 = client1.transport.list_backups._session - session2 = client2.transport.list_backups._session - assert session1 != session2 - session1 = client1.transport.delete_backup._session - session2 = client2.transport.delete_backup._session - assert session1 != session2 - session1 = client1.transport.restore_database._session - session2 = client2.transport.restore_database._session - assert session1 
!= session2 - session1 = client1.transport.create_backup_schedule._session - session2 = client2.transport.create_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.get_backup_schedule._session - session2 = client2.transport.get_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.list_backup_schedules._session - session2 = client2.transport.list_backup_schedules._session - assert session1 != session2 - session1 = client1.transport.update_backup_schedule._session - session2 = client2.transport.update_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.delete_backup_schedule._session - session2 = client2.transport.delete_backup_schedule._session - assert session1 != session2 -def test_firestore_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_firestore_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) -def test_firestore_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.FirestoreAdminGrpcTransport, transports.FirestoreAdminGrpcAsyncIOTransport]) -def test_firestore_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_firestore_admin_grpc_lro_client(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_firestore_admin_grpc_lro_async_client(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - location = "clam" - backup = "whelk" - expected = "projects/{project}/locations/{location}/backups/{backup}".format(project=project, location=location, backup=backup, ) - actual = FirestoreAdminClient.backup_path(project, location, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "octopus", - "location": "oyster", - "backup": "nudibranch", - } - path = FirestoreAdminClient.backup_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_backup_path(path) - assert expected == actual - -def test_backup_schedule_path(): - project = "cuttlefish" - database = "mussel" - backup_schedule = "winkle" - expected = "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format(project=project, database=database, backup_schedule=backup_schedule, ) - actual = FirestoreAdminClient.backup_schedule_path(project, database, backup_schedule) - assert expected == actual - - -def test_parse_backup_schedule_path(): - expected = { - "project": "nautilus", - "database": "scallop", - "backup_schedule": "abalone", - } - path = FirestoreAdminClient.backup_schedule_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_backup_schedule_path(path) - assert expected == actual - -def test_collection_group_path(): - project = "squid" - database = "clam" - collection = "whelk" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}".format(project=project, database=database, collection=collection, ) - actual = FirestoreAdminClient.collection_group_path(project, database, collection) - assert expected == actual - - -def test_parse_collection_group_path(): - expected = { - "project": "octopus", - "database": "oyster", - "collection": "nudibranch", - } - path = FirestoreAdminClient.collection_group_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_collection_group_path(path) - assert expected == actual - -def test_database_path(): - project = "cuttlefish" - database = "mussel" - expected = "projects/{project}/databases/{database}".format(project=project, database=database, ) - actual = FirestoreAdminClient.database_path(project, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "winkle", - "database": "nautilus", - } - path = FirestoreAdminClient.database_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_database_path(path) - assert expected == actual - -def test_field_path(): - project = "scallop" - database = "abalone" - collection = "squid" - field = "clam" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(project=project, database=database, collection=collection, field=field, ) - actual = FirestoreAdminClient.field_path(project, database, collection, field) - assert expected == actual - - -def test_parse_field_path(): - expected = { - "project": "whelk", - "database": "octopus", - "collection": "oyster", - "field": "nudibranch", - } - path = FirestoreAdminClient.field_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_field_path(path) - assert expected == actual - -def test_index_path(): - project = "cuttlefish" - database = "mussel" - collection = "winkle" - index = "nautilus" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(project=project, database=database, collection=collection, index=index, ) - actual = FirestoreAdminClient.index_path(project, database, collection, index) - assert expected == actual - - -def test_parse_index_path(): - expected = { - "project": "scallop", - "database": "abalone", - "collection": "squid", - "index": "clam", - } - path = FirestoreAdminClient.index_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_index_path(path) - assert expected == actual - -def test_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = FirestoreAdminClient.location_path(project, location) - assert expected == actual - - -def test_parse_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = FirestoreAdminClient.location_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_location_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = FirestoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = FirestoreAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = FirestoreAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = FirestoreAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = FirestoreAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = FirestoreAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = FirestoreAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = FirestoreAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = FirestoreAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = FirestoreAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.FirestoreAdminTransport, '_prep_wrapped_messages') as prep: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.FirestoreAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = FirestoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/databases/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/databases/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/databases/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - 
client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc deleted file mode 100644 index 8c3b084db3..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bundle/__init__.py - google/cloud/bundle/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 b/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 deleted file mode 100644 index 29227d4cf4..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in b/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in deleted file mode 100644 index 67688af043..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bundle *.py -recursive-include google/cloud/bundle *.py diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst deleted file mode 100644 index e0cf79f104..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Bundle API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bundle API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css deleted file mode 100644 index 06423be0b5..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst deleted file mode 100644 index 535624ca8c..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/services_.rst +++ /dev/null @@ -1,4 +0,0 @@ -Services for Google Cloud Bundle API -===================================== -.. toctree:: - :maxdepth: 2 diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst deleted file mode 100644 index 2fdc85ab78..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/bundle/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bundle API -================================== - -.. 
automodule:: google.cloud.bundle.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py deleted file mode 100644 index 7e747b50b1..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bundle documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. 
They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bundle" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. 
If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bundle-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bundle.tex", - u"google-cloud-bundle Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bundle", - u"Google Cloud Bundle Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bundle", - u"google-cloud-bundle Documentation", - author, - "google-cloud-bundle", - "GAPIC library for Google Cloud Bundle API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst b/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst deleted file mode 100644 index ddec428d33..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - bundle/services - bundle/types diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py deleted file mode 100644 index 3e35a98926..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bundle import gapic_version as package_version - -__version__ = package_version.__version__ - - - -from .types.bundle import BundledDocumentMetadata -from .types.bundle import BundledQuery -from .types.bundle import BundleElement -from .types.bundle import BundleMetadata -from .types.bundle import NamedQuery - -__all__ = ( -'BundleElement', -'BundleMetadata', -'BundledDocumentMetadata', -'BundledQuery', -'NamedQuery', -) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json deleted file mode 100644 index e81fe51253..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_metadata.json +++ /dev/null @@ -1,7 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bundle", - "protoPackage": "google.firestore.bundle", - "schema": "1.0" -} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py deleted file mode 100644 index 558c8aab67..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed deleted file mode 100644 index e2987f2963..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bundle package uses inline types. diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py deleted file mode 100644 index 8f6cf06824..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py deleted file mode 100644 index bd79268795..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .bundle import ( - BundledDocumentMetadata, - BundledQuery, - BundleElement, - BundleMetadata, - NamedQuery, -) - -__all__ = ( - 'BundledDocumentMetadata', - 'BundledQuery', - 'BundleElement', - 'BundleMetadata', - 'NamedQuery', -) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py deleted file mode 100644 index 074b54601e..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/google/cloud/bundle/types/bundle.py +++ /dev/null @@ -1,251 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.firestore.v1 import document_pb2 # type: ignore -from google.firestore.v1 import query_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.firestore.bundle', - manifest={ - 'BundledQuery', - 'NamedQuery', - 'BundledDocumentMetadata', - 'BundleMetadata', - 'BundleElement', - }, -) - - -class BundledQuery(proto.Message): - r"""Encodes a query saved in the bundle. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - The parent resource name. - structured_query (google.firestore.v1.query_pb2.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. - limit_type (google.cloud.bundle.types.BundledQuery.LimitType): - - """ - class LimitType(proto.Enum): - r"""If the query is a limit query, should the limit be applied to - the beginning or the end of results. - - Values: - FIRST (0): - No description available. - LAST (1): - No description available. 
- """ - FIRST = 0 - LAST = 1 - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: query_pb2.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof='query_type', - message=query_pb2.StructuredQuery, - ) - limit_type: LimitType = proto.Field( - proto.ENUM, - number=3, - enum=LimitType, - ) - - -class NamedQuery(proto.Message): - r"""A Query associated with a name, created as part of the bundle - file, and can be read by client SDKs once the bundle containing - them is loaded. - - Attributes: - name (str): - Name of the query, such that client can use - the name to load this query from bundle, and - resume from when the query results are - materialized into this bundle. - bundled_query (google.cloud.bundle.types.BundledQuery): - The query saved in the bundle. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read time of the query, when it is used - to build the bundle. This is useful to resume - the query from the bundle, once it is loaded by - client SDKs. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - bundled_query: 'BundledQuery' = proto.Field( - proto.MESSAGE, - number=2, - message='BundledQuery', - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class BundledDocumentMetadata(proto.Message): - r"""Metadata describing a Firestore document saved in the bundle. - - Attributes: - name (str): - The document key of a bundled document. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The snapshot version of the document data - bundled. - exists (bool): - Whether the document exists. - queries (MutableSequence[str]): - The names of the queries in this bundle that - this document matches to. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - exists: bool = proto.Field( - proto.BOOL, - number=3, - ) - queries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class BundleMetadata(proto.Message): - r"""Metadata describing the bundle file/stream. - - Attributes: - id (str): - The ID of the bundle. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time at which the documents snapshot is taken - for this bundle. - version (int): - The schema version of the bundle. - total_documents (int): - The number of documents in the bundle. - total_bytes (int): - The size of the bundle in bytes, excluding this - ``BundleMetadata``. - """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - version: int = proto.Field( - proto.UINT32, - number=3, - ) - total_documents: int = proto.Field( - proto.UINT32, - number=4, - ) - total_bytes: int = proto.Field( - proto.UINT64, - number=5, - ) - - -class BundleElement(proto.Message): - r"""A Firestore bundle is a length-prefixed stream of JSON - representations of ``BundleElement``. Only one ``BundleMetadata`` is - expected, and it should be the first element. The named queries - follow after ``metadata``. Every ``document_metadata`` is - immediately followed by a ``document``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - metadata (google.cloud.bundle.types.BundleMetadata): - - This field is a member of `oneof`_ ``element_type``. 
- named_query (google.cloud.bundle.types.NamedQuery): - - This field is a member of `oneof`_ ``element_type``. - document_metadata (google.cloud.bundle.types.BundledDocumentMetadata): - - This field is a member of `oneof`_ ``element_type``. - document (google.firestore.v1.document_pb2.Document): - - This field is a member of `oneof`_ ``element_type``. - """ - - metadata: 'BundleMetadata' = proto.Field( - proto.MESSAGE, - number=1, - oneof='element_type', - message='BundleMetadata', - ) - named_query: 'NamedQuery' = proto.Field( - proto.MESSAGE, - number=2, - oneof='element_type', - message='NamedQuery', - ) - document_metadata: 'BundledDocumentMetadata' = proto.Field( - proto.MESSAGE, - number=3, - oneof='element_type', - message='BundledDocumentMetadata', - ) - document: document_pb2.Document = proto.Field( - proto.MESSAGE, - number=4, - oneof='element_type', - message=document_pb2.Document, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini b/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py deleted file mode 100644 index 0d6a9976d2..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/noxfile.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bundle' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bundle/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - -@nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): - """Run the unit test suite against pre-release versions of dependencies.""" - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency 
versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bundle/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py deleted file mode 100644 index ff298689fd..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/scripts/fixup_bundle_keywords.py +++ /dev/null @@ -1,175 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bundleCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bundleCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bundle client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py deleted file mode 100644 index 85af540536..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bundle' - - -description = "Google Cloud Bundle API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bundle/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bundle" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - 
"Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt deleted file mode 100644 index b8a550c738..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.19.5 diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt b/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed25..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py b/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py deleted file mode 100644 index 7b3de3117f..0000000000 --- a/owl-bot-staging/firestore_bundle/firestore-bundle-py/tests/unit/gapic/bundle/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#