From a2ed2abe3f200107a7688740d5e3c04f00cbd97d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jul 2024 12:55:34 -0700 Subject: [PATCH 1/4] chore(deps): bump zipp from 3.17.0 to 3.19.1 in /.kokoro (#989) Bumps [zipp](https://github.com/jaraco/zipp) from 3.17.0 to 3.19.1. - [Release notes](https://github.com/jaraco/zipp/releases) - [Changelog](https://github.com/jaraco/zipp/blob/main/NEWS.rst) - [Commits](https://github.com/jaraco/zipp/compare/v3.17.0...v3.19.1) --- updated-dependencies: - dependency-name: zipp dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .kokoro/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 51f92b8e1..4c4c77cd0 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -502,9 +502,9 @@ wheel==0.41.3 \ --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.1 \ + --hash=sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091 \ + --hash=sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: From 481c8d6b17fdd26233b6b271b1fcb1cc390ad434 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 17 Jul 2024 17:00:41 -0400 Subject: [PATCH 2/4] chore: update templated files (#986) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update templated files * update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * upgrade place_before owlbot functionality * Revert "upgrade place_before owlbot functionality" This reverts commit e29fdec4e014c6e1b72f7246a0f096e45e6491cd. 
* fixed replacement for docfx patch * fix missing close quote * fixed quote style * added line breaks * remove escape * Add 'OwlBot Post Processor' as a required check * remove noxfile from owlbot control * removed experimental_v3 branch customized protection settings * added test tag requirement --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Sanche --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 3 +- .github/auto-label.yaml | 2 +- .github/sync-repo-settings.yaml | 20 +- .kokoro/build.sh | 2 +- .kokoro/docker/docs/Dockerfile | 2 +- .kokoro/populate-secrets.sh | 2 +- .kokoro/publish-docs.sh | 2 +- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 509 ++++++++++++++------------- .kokoro/test-samples-against-head.sh | 2 +- .kokoro/test-samples-impl.sh | 2 +- .kokoro/test-samples.sh | 2 +- .kokoro/trampoline.sh | 2 +- .kokoro/trampoline_v2.sh | 2 +- .pre-commit-config.yaml | 2 +- .trampolinerc | 2 +- MANIFEST.in | 2 +- docs/conf.py | 2 +- noxfile.py | 56 ++- owlbot.py | 140 +------- samples/snippets/filters/noxfile.py | 16 +- scripts/decrypt-secrets.sh | 2 +- scripts/readme-gen/readme_gen.py | 2 +- 24 files changed, 339 insertions(+), 441 deletions(-) diff --git a/.flake8 b/.flake8 index 87f6e408c..32986c792 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 81f87c569..620159621 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 8b37ee897..21786a4eb 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index a8cc5b33b..1319e555d 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -31,24 +31,8 @@ branchProtectionRules: - 'Kokoro' - 'Kokoro system-3.8' - 'cla/google' -- pattern: experimental_v3 - # Can admins overwrite branch protection. - # Defaults to `true` - isAdminEnforced: false - # Number of approving reviews required to update matching branches. - # Defaults to `1` - requiredApprovingReviewCount: 1 - # Are reviews from code owners required to update matching branches. - # Defaults to `false` - requiresCodeOwnerReviews: false - # Require up to date branches - requiresStrictStatusChecks: false - # List of required status check contexts that must pass for commits to be accepted to matching branches. 
- requiredStatusCheckContexts: - - 'Kokoro' - - 'Kokoro system-3.8' - - 'cla/google' - - 'Conformance / Async v3 Client / Python 3.8' + - 'Conformance / Async v3 Client / Python 3.8 / Test Tag v0.0.2' + - 'OwlBot Post Processor' # List of explicit permissions to add (additive only) permissionRules: # Team slug to add to repository permissions diff --git a/.kokoro/build.sh b/.kokoro/build.sh index b2212fce8..b00036db3 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index bdaf39fe2..a26ce6193 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index 6f3972140..c435402f4 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 9eafe0be3..38f083f05 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 2e1cbfa81..d21aacc5e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 4c4c77cd0..35ece0e4d 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - 
--hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + 
--hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # 
gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 
\ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - 
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + 
--hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + 
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + 
--hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + 
--hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - 
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.19.1 \ - --hash=sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091 \ - --hash=sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + 
--hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 63ac41dfa..e9d8bd79a 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 5a0f5fab6..55910c8ba 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 50b35a48c..7933d8201 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f267..48f796997 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a9..35fa52923 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a8e16950..1d74695f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index a7dfeb42c..008015237 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/MANIFEST.in b/MANIFEST.in index e0a667053..d6814cd60 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/conf.py b/docs/conf.py index b5a870f58..d8f0352cd 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/noxfile.py b/noxfile.py index 3ea12c187..5fb94526d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -185,14 +185,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -206,15 +220,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -430,10 +441,17 @@ def docfx(session): session.run("python", "docs/scripts/patch_devsite_toc.py") -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -468,9 +486,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -496,7 +514,13 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -509,6 +533,9 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -517,4 +544,7 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/owlbot.py b/owlbot.py index 170bc08d4..84aa3d61b 100644 --- a/owlbot.py +++ b/owlbot.py @@ -95,145 +95,7 @@ def get_staging_dirs( ], ) -s.move(templated_files, excludes=[".coveragerc", "README.rst", ".github/release-please.yml"]) - -# ---------------------------------------------------------------------------- -# Customize noxfile.py -# ---------------------------------------------------------------------------- - -def place_before(path, text, *before_text, escape=None): - replacement = "\n".join(before_text) + "\n" + text - if escape: - for c in escape: - text = text.replace(c, '\\' + c) - s.replace([path], text, replacement) - -system_emulated_session = """ -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system_emulated(session): - import subprocess - import signal - - try: - subprocess.call(["gcloud", "--version"]) - except OSError: - session.skip("gcloud not found but required for emulator support") - - # Currently, CI/CD doesn't have beta component of gcloud. 
- subprocess.call(["gcloud", "components", "install", "beta", "bigtable"]) - - hostport = "localhost:8789" - session.env["BIGTABLE_EMULATOR_HOST"] = hostport - - p = subprocess.Popen( - ["gcloud", "beta", "emulators", "bigtable", "start", "--host-port", hostport] - ) - - try: - system(session) - finally: - # Stop Emulator - os.killpg(os.getpgid(p.pid), signal.SIGKILL) - -""" - -place_before( - "noxfile.py", - "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)\n" - "def system(session):", - system_emulated_session, - escape="()" -) - -conformance_session = """ -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def conformance(session): - TEST_REPO_URL = "https://github.com/googleapis/cloud-bigtable-clients-test.git" - CLONE_REPO_DIR = "cloud-bigtable-clients-test" - # install dependencies - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - with session.chdir("test_proxy"): - # download the conformance test suite - clone_dir = os.path.join(CURRENT_DIRECTORY, CLONE_REPO_DIR) - if not os.path.exists(clone_dir): - print("downloading copy of test repo") - session.run("git", "clone", TEST_REPO_URL, CLONE_REPO_DIR, external=True) - session.run("bash", "-e", "run_tests.sh", external=True) - -""" - -place_before( - "noxfile.py", - "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)\n" - "def system(session):", - conformance_session, - escape="()" -) - -# add system_emulated and mypy and conformance to nox session -s.replace("noxfile.py", - """nox.options.sessions = \[ - "unit", - "system",""", - """nox.options.sessions = [ - "unit", - "system_emulated", - "system", - "mypy",""", -) - - -s.replace( - "noxfile.py", - """\ -@nox.session\(python=DEFAULT_PYTHON_VERSION\) -def lint_setup_py\(session\): -""", - '''\ -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Verify type hints are mypy compatible.""" - session.install("-e", ".") - session.install("mypy", "types-setuptools", "types-protobuf", "types-mock", "types-requests") - session.install("google-cloud-testutils") - session.run( - "mypy", - "-p", - "google.cloud.bigtable.data", - "--check-untyped-defs", - "--warn-unreachable", - "--disallow-any-generics", - "--exclude", - "tests/system/v2_client", - "--exclude", - "tests/unit/v2_client", - ) - - -# add customization to docfx -docfx_postprocess = """ - # Customization: Add extra sections to the table of contents for the Classic vs Async clients - session.install("pyyaml") - session.run("python", "docs/scripts/patch_devsite_toc.py") -""" - -place_before( - "noxfile.py", - "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)\n" - "def prerelease_deps(session):", - docfx_postprocess, - escape="()" -) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): -''', -) - +s.move(templated_files, excludes=[".coveragerc", "README.rst", ".github/release-please.yml", "noxfile.py"]) # ---------------------------------------------------------------------------- # Customize gapics to include PooledBigtableGrpcAsyncIOTransport diff --git a/samples/snippets/filters/noxfile.py b/samples/snippets/filters/noxfile.py index c36d5f2d8..483b55901 100644 --- a/samples/snippets/filters/noxfile.py +++ b/samples/snippets/filters/noxfile.py @@ -22,6 +22,7 @@ import nox + # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! 
@@ -159,7 +160,6 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # - @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,9 +187,7 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -211,7 +209,9 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +224,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) + concurrent_args.extend(['-n', 'auto']) session.run( "pytest", @@ -256,7 +256,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421d..120b0ddc4 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc11983..8f5e248a0 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
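The noxfile.py change in this patch threads a protobuf_implementation value into each test invocation via session.run(..., env=...), so the pytest subprocess, and not just the nox process itself, selects the protobuf runtime under test. A minimal sketch of that pattern, assuming only that nox is installed; the session name and the parametrize values below are illustrative, not part of this patch:

import nox

@nox.session(python="3.8")
@nox.parametrize("protobuf_implementation", ["python", "upb"])
def prerelease_smoke(session, protobuf_implementation):
    # Hypothetical session: run the unit tests under one protobuf runtime
    # by exporting the selector variable to the pytest subprocess.
    session.install("pytest", "protobuf")
    session.run(
        "pytest",
        "tests/unit",
        env={"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation},
    )

Passing env= per run keeps each parametrized session hermetic; setting the variable in the parent shell would leak one runtime choice into every session.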
From 7ac8e142f99a6891b6bc286858f764def503e89a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 18 Jul 2024 06:29:24 -0700 Subject: [PATCH 3/4] fix: Allow protobuf 5.x (#972) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * feat: publish the Cloud Bigtable ExecuteQuery API The ExecuteQuery API will allow users to query Bigtable using SQL PiperOrigin-RevId: 650660213 Source-Link: https://github.com/googleapis/googleapis/commit/f681f79a93814d8b974da9dd8cdc62228d0f4758 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3180845487136794952b8f365fe6c6868999d9c0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzE4MDg0NTQ4NzEzNjc5NDk1MmI4ZjM2NWZlNmM2ODY4OTk5ZDljMCJ9 * feat: publish ProtoRows Message This is needed to parse ExecuteQuery responses PiperOrigin-RevId: 651386373 Source-Link: https://github.com/googleapis/googleapis/commit/a5be6fa5ff1603b2cab067408e2640d270f0e300 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d467ce893a04c41e504983346c215d41fd263650 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDQ2N2NlODkzYTA0YzQxZTUwNDk4MzM0NmMyMTVkNDFmZDI2MzY1MCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update setup.py to match googleapis/gapic-generator-python/blob/main/gapic/templates/setup.py.j2 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update constraints --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../bigtable_instance_admin/async_client.py | 1 + .../transports/base.py | 4 +- .../transports/grpc.py | 3 +- .../transports/grpc_asyncio.py | 3 +- .../bigtable_table_admin/async_client.py | 1 + .../bigtable_table_admin/transports/base.py | 4 +- .../bigtable_table_admin/transports/grpc.py | 3 +- .../transports/grpc_asyncio.py | 3 +- google/cloud/bigtable_v2/__init__.py | 22 + google/cloud/bigtable_v2/gapic_metadata.json | 15 + .../services/bigtable/async_client.py | 104 + .../bigtable_v2/services/bigtable/client.py | 107 + .../services/bigtable/transports/base.py | 18 +- .../services/bigtable/transports/grpc.py | 30 +- .../bigtable/transports/grpc_asyncio.py | 37 +- .../services/bigtable/transports/rest.py | 131 + google/cloud/bigtable_v2/types/__init__.py | 24 + google/cloud/bigtable_v2/types/bigtable.py | 123 + google/cloud/bigtable_v2/types/data.py | 292 ++- google/cloud/bigtable_v2/types/types.py | 561 +++++ 
scripts/fixup_bigtable_v2_keywords.py | 1 + setup.py | 6 +- testing/constraints-3.7.txt | 5 +- testing/constraints-3.8.txt | 5 +- .../test_bigtable_instance_admin.py | 159 +- .../test_bigtable_table_admin.py | 234 +- tests/unit/gapic/bigtable_v2/test_bigtable.py | 2111 +++++++++++------ 27 files changed, 2935 insertions(+), 1072 deletions(-) create mode 100644 google/cloud/bigtable_v2/types/types.py diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py index 52c537260..171dd8298 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py @@ -38,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py index fc346c9bb..bc2f819b8 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py @@ -96,6 +96,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -108,7 +110,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py index 49a1b9e11..cc3e70986 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py @@ -132,7 +132,8 @@ def __init__( if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py index b85a696d9..1fa85551c 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py @@ -179,7 +179,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py index 2747e4037..5e429f7e5 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py @@ -38,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py index 1ec3be85e..bb7875d87 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py @@ -96,6 +96,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -108,7 +110,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py index 01cec4e0b..71f06947f 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py @@ -134,7 +134,8 @@ def __init__( if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py index f20ed0a49..bdd6e20c8 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py @@ -181,7 +181,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/bigtable_v2/__init__.py b/google/cloud/bigtable_v2/__init__.py index 56748d882..f2b3ddf28 100644 --- a/google/cloud/bigtable_v2/__init__.py +++ b/google/cloud/bigtable_v2/__init__.py @@ -23,6 +23,8 @@ from .types.bigtable import CheckAndMutateRowRequest from .types.bigtable import CheckAndMutateRowResponse +from .types.bigtable import ExecuteQueryRequest +from .types.bigtable import ExecuteQueryResponse from .types.bigtable import GenerateInitialChangeStreamPartitionsRequest from .types.bigtable import GenerateInitialChangeStreamPartitionsResponse from .types.bigtable import MutateRowRequest @@ -40,12 +42,20 @@ from .types.bigtable import ReadRowsResponse from .types.bigtable import SampleRowKeysRequest from .types.bigtable import SampleRowKeysResponse +from .types.data import ArrayValue from .types.data import Cell from .types.data import Column +from .types.data import ColumnMetadata from .types.data import ColumnRange from .types.data import Family from .types.data import Mutation +from .types.data import PartialResultSet +from .types.data import ProtoFormat +from .types.data import ProtoRows +from .types.data import ProtoRowsBatch +from .types.data import ProtoSchema from .types.data import ReadModifyWriteRule +from .types.data import ResultSetMetadata from .types.data import Row from .types.data import RowFilter from .types.data import RowRange @@ -62,15 +72,20 @@ from .types.request_stats import RequestLatencyStats from .types.request_stats import RequestStats from .types.response_params import ResponseParams +from .types.types import Type __all__ = ( "BigtableAsyncClient", + "ArrayValue", "BigtableClient", "Cell", "CheckAndMutateRowRequest", "CheckAndMutateRowResponse", "Column", + "ColumnMetadata", "ColumnRange", + "ExecuteQueryRequest", + "ExecuteQueryResponse", "Family", "FeatureFlags", "FullReadStatsView", @@ -81,8 +96,13 @@ "MutateRowsRequest", "MutateRowsResponse", "Mutation", + "PartialResultSet", "PingAndWarmRequest", "PingAndWarmResponse", + "ProtoFormat", + "ProtoRows", + "ProtoRowsBatch", + "ProtoSchema", "RateLimitInfo", "ReadChangeStreamRequest", "ReadChangeStreamResponse", @@ -95,6 +115,7 @@ "RequestLatencyStats", "RequestStats", "ResponseParams", + "ResultSetMetadata", "Row", "RowFilter", "RowRange", @@ -105,6 +126,7 @@ "StreamContinuationTokens", "StreamPartition", "TimestampRange", + "Type", "Value", "ValueRange", ) diff --git a/google/cloud/bigtable_v2/gapic_metadata.json b/google/cloud/bigtable_v2/gapic_metadata.json index 181dc8ff5..fd47c0435 100644 --- a/google/cloud/bigtable_v2/gapic_metadata.json +++ b/google/cloud/bigtable_v2/gapic_metadata.json @@ -15,6 +15,11 @@ "check_and_mutate_row" ] }, + "ExecuteQuery": { + "methods": [ + "execute_query" + ] + }, "GenerateInitialChangeStreamPartitions": { "methods": [ "generate_initial_change_stream_partitions" @@ -65,6 +70,11 @@ "check_and_mutate_row" ] }, + "ExecuteQuery": { + "methods": [ + "execute_query" + ] + }, "GenerateInitialChangeStreamPartitions": { "methods": [ "generate_initial_change_stream_partitions" @@ -115,6 +125,11 @@ "check_and_mutate_row" ] }, + "ExecuteQuery": { + "methods": [ + "execute_query" + ] + }, "GenerateInitialChangeStreamPartitions": { "methods": [ "generate_initial_change_stream_partitions" diff --git a/google/cloud/bigtable_v2/services/bigtable/async_client.py b/google/cloud/bigtable_v2/services/bigtable/async_client.py index 70daa63e3..12432dda7 100644 --- 
a/google/cloud/bigtable_v2/services/bigtable/async_client.py
+++ b/google/cloud/bigtable_v2/services/bigtable/async_client.py
@@ -40,6 +40,7 @@
 from google.auth import credentials as ga_credentials # type: ignore
 from google.oauth2 import service_account # type: ignore
+
 try:
 OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
 except AttributeError: # pragma: NO COVER
@@ -1293,6 +1294,109 @@ def read_change_stream(
 # Done; return the response.
 return response
+ def execute_query(
+ self,
+ request: Optional[Union[bigtable.ExecuteQueryRequest, dict]] = None,
+ *,
+ instance_name: Optional[str] = None,
+ query: Optional[str] = None,
+ app_profile_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> Awaitable[AsyncIterable[bigtable.ExecuteQueryResponse]]:
+ r"""Executes a BTQL query against a particular Cloud
+ Bigtable instance.
+
+ Args:
+ request (Optional[Union[google.cloud.bigtable_v2.types.ExecuteQueryRequest, dict]]):
+ The request object. Request message for
+ Bigtable.ExecuteQuery
+ instance_name (:class:`str`):
+ Required. The unique name of the instance against which
+ the query should be executed. Values are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``instance_name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ query (:class:`str`):
+ Required. The query string.
+ This corresponds to the ``query`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ app_profile_id (:class:`str`):
+ Optional. This value specifies routing for replication.
+ If not specified, the ``default`` application profile
+ will be used.
+
+ This corresponds to the ``app_profile_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ AsyncIterable[google.cloud.bigtable_v2.types.ExecuteQueryResponse]:
+ Response message for
+ Bigtable.ExecuteQuery
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([instance_name, query, app_profile_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, bigtable.ExecuteQueryRequest):
+ request = bigtable.ExecuteQueryRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance_name is not None:
+ request.instance_name = instance_name
+ if query is not None:
+ request.query = query
+ if app_profile_id is not None:
+ request.app_profile_id = app_profile_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.execute_query
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance_name", request.instance_name),)
+ ),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
 async def __aenter__(self) -> "BigtableAsyncClient":
 return self
diff --git a/google/cloud/bigtable_v2/services/bigtable/client.py b/google/cloud/bigtable_v2/services/bigtable/client.py
index 7eda705b9..0937c90fe 100644
--- a/google/cloud/bigtable_v2/services/bigtable/client.py
+++ b/google/cloud/bigtable_v2/services/bigtable/client.py
@@ -1844,6 +1844,113 @@ def read_change_stream(
 # Done; return the response.
 return response
+ def execute_query(
+ self,
+ request: Optional[Union[bigtable.ExecuteQueryRequest, dict]] = None,
+ *,
+ instance_name: Optional[str] = None,
+ query: Optional[str] = None,
+ app_profile_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> Iterable[bigtable.ExecuteQueryResponse]:
+ r"""Executes a BTQL query against a particular Cloud
+ Bigtable instance.
+
+ Args:
+ request (Union[google.cloud.bigtable_v2.types.ExecuteQueryRequest, dict]):
+ The request object. Request message for
+ Bigtable.ExecuteQuery
+ instance_name (str):
+ Required. The unique name of the instance against which
+ the query should be executed. Values are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``instance_name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ query (str):
+ Required. The query string.
+ This corresponds to the ``query`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ app_profile_id (str):
+ Optional. This value specifies routing for replication.
+ If not specified, the ``default`` application profile
+ will be used.
+
+ This corresponds to the ``app_profile_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ Iterable[google.cloud.bigtable_v2.types.ExecuteQueryResponse]:
+ Response message for
+ Bigtable.ExecuteQuery
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([instance_name, query, app_profile_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, bigtable.ExecuteQueryRequest):
+ request = bigtable.ExecuteQueryRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance_name is not None:
+ request.instance_name = instance_name
+ if query is not None:
+ request.query = query
+ if app_profile_id is not None:
+ request.app_profile_id = app_profile_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.execute_query]
+
+ header_params = {}
+
+ routing_param_regex = re.compile("^(?P<name>projects/[^/]+/instances/[^/]+)$")
+ regex_match = routing_param_regex.match(request.instance_name)
+ if regex_match and regex_match.group("name"):
+ header_params["name"] = regex_match.group("name")
+
+ if request.app_profile_id:
+ header_params["app_profile_id"] = request.app_profile_id
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
 def __enter__(self) -> "BigtableClient":
 return self
diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/base.py b/google/cloud/bigtable_v2/services/bigtable/transports/base.py
index d93379723..17ff3fb3d 100644
--- a/google/cloud/bigtable_v2/services/bigtable/transports/base.py
+++ b/google/cloud/bigtable_v2/services/bigtable/transports/base.py
@@ -89,6 +89,8 @@ def __init__(
 # Save the scopes.
 self._scopes = scopes
+ if not hasattr(self, "_ignore_credentials"):
+ self._ignore_credentials: bool = False
 # If no credentials are provided, then determine the appropriate
 # defaults.
@@ -101,7 +103,7 @@ def __init__(
 credentials, _ = google.auth.load_credentials_from_file(
 credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
 )
- elif credentials is None:
+ elif credentials is None and not self._ignore_credentials:
 credentials, _ = google.auth.default(
 **scopes_kwargs, quota_project_id=quota_project_id
 )
@@ -189,6 +191,11 @@ def _prep_wrapped_messages(self, client_info):
 default_timeout=43200.0,
 client_info=client_info,
 ),
+ self.execute_query: gapic_v1.method.wrap_method(
+ self.execute_query,
+ default_timeout=None,
+ client_info=client_info,
+ ),
 }
 def close(self):
@@ -295,6 +302,15 @@ def read_change_stream(
 ]:
 raise NotImplementedError()
+ @property
+ def execute_query(
+ self,
+ ) -> Callable[
+ [bigtable.ExecuteQueryRequest],
+ Union[bigtable.ExecuteQueryResponse, Awaitable[bigtable.ExecuteQueryResponse]],
+ ]:
+ raise NotImplementedError()
+
 @property
 def kind(self) -> str:
 raise NotImplementedError()
diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py b/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
index 2a1a9a284..febdd441d 100644
--- a/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
+++ b/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
@@ -123,7 +123,8 @@ def __init__(
 if isinstance(channel, grpc.Channel):
 # Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
 # If a channel was explicitly provided, set it.
 self._grpc_channel = channel
 self._ssl_channel_credentials = None
@@ -508,6 +509,33 @@ def read_change_stream(
 )
 return self._stubs["read_change_stream"]
+ @property
+ def execute_query(
+ self,
+ ) -> Callable[[bigtable.ExecuteQueryRequest], bigtable.ExecuteQueryResponse]:
+ r"""Return a callable for the execute query method over gRPC.
+ + Executes a BTQL query against a particular Cloud + Bigtable instance. + + Returns: + Callable[[~.ExecuteQueryRequest], + ~.ExecuteQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_query" not in self._stubs: + self._stubs["execute_query"] = self.grpc_channel.unary_stream( + "/google.bigtable.v2.Bigtable/ExecuteQuery", + request_serializer=bigtable.ExecuteQueryRequest.serialize, + response_deserializer=bigtable.ExecuteQueryResponse.deserialize, + ) + return self._stubs["execute_query"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py b/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py index 2d04f79af..40d6a3fa4 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py @@ -170,7 +170,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -518,6 +519,35 @@ def read_change_stream( ) return self._stubs["read_change_stream"] + @property + def execute_query( + self, + ) -> Callable[ + [bigtable.ExecuteQueryRequest], Awaitable[bigtable.ExecuteQueryResponse] + ]: + r"""Return a callable for the execute query method over gRPC. + + Executes a BTQL query against a particular Cloud + Bigtable instance. + + Returns: + Callable[[~.ExecuteQueryRequest], + Awaitable[~.ExecuteQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "execute_query" not in self._stubs: + self._stubs["execute_query"] = self.grpc_channel.unary_stream( + "/google.bigtable.v2.Bigtable/ExecuteQuery", + request_serializer=bigtable.ExecuteQueryRequest.serialize, + response_deserializer=bigtable.ExecuteQueryResponse.deserialize, + ) + return self._stubs["execute_query"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -576,6 +606,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=43200.0, client_info=client_info, ), + self.execute_query: gapic_v1.method_async.wrap_method( + self.execute_query, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py index a4d8e0ce9..a3391005f 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py @@ -74,6 +74,14 @@ def post_check_and_mutate_row(self, response): logging.log(f"Received response: {response}") return response + def pre_execute_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_generate_initial_change_stream_partitions(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -167,6 +175,27 @@ def post_check_and_mutate_row( """ return response + def pre_execute_query( + self, request: bigtable.ExecuteQueryRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[bigtable.ExecuteQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Bigtable server. + """ + return request, metadata + + def post_execute_query( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for execute_query + + Override in a subclass to manipulate the response + after it is returned by the Bigtable server but before + it is returned to user code. + """ + return response + def pre_generate_initial_change_stream_partitions( self, request: bigtable.GenerateInitialChangeStreamPartitionsRequest, @@ -545,6 +574,100 @@ def __call__( resp = self._interceptor.post_check_and_mutate_row(resp) return resp + class _ExecuteQuery(BigtableRestStub): + def __hash__(self): + return hash("ExecuteQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable.ExecuteQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the execute query method over HTTP. + + Args: + request (~.bigtable.ExecuteQueryRequest): + The request object. Request message for + Bigtable.ExecuteQuery + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.bigtable.ExecuteQueryResponse: + Response message for + Bigtable.ExecuteQuery + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{instance_name=projects/*/instances/*}:executeQuery", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_execute_query(request, metadata) + pb_request = bigtable.ExecuteQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, bigtable.ExecuteQueryResponse + ) + resp = self._interceptor.post_execute_query(resp) + return resp + class _GenerateInitialChangeStreamPartitions(BigtableRestStub): def __hash__(self): return hash("GenerateInitialChangeStreamPartitions") @@ -1324,6 +1447,14 @@ def check_and_mutate_row( # In C++ this would require a dynamic_cast return self._CheckAndMutateRow(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_query( + self, + ) -> Callable[[bigtable.ExecuteQueryRequest], bigtable.ExecuteQueryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast
+ return self._ExecuteQuery(self._session, self._host, self._interceptor) # type: ignore
+
 @property
 def generate_initial_change_stream_partitions(
 self,
diff --git a/google/cloud/bigtable_v2/types/__init__.py b/google/cloud/bigtable_v2/types/__init__.py
index a7961a910..e524627cd 100644
--- a/google/cloud/bigtable_v2/types/__init__.py
+++ b/google/cloud/bigtable_v2/types/__init__.py
@@ -16,6 +16,8 @@
 from .bigtable import (
 CheckAndMutateRowRequest,
 CheckAndMutateRowResponse,
+ ExecuteQueryRequest,
+ ExecuteQueryResponse,
 GenerateInitialChangeStreamPartitionsRequest,
 GenerateInitialChangeStreamPartitionsResponse,
 MutateRowRequest,
@@ -35,12 +37,20 @@
 SampleRowKeysResponse,
 )
 from .data import (
+ ArrayValue,
 Cell,
 Column,
+ ColumnMetadata,
 ColumnRange,
 Family,
 Mutation,
+ PartialResultSet,
+ ProtoFormat,
+ ProtoRows,
+ ProtoRowsBatch,
+ ProtoSchema,
 ReadModifyWriteRule,
+ ResultSetMetadata,
 Row,
 RowFilter,
 RowRange,
@@ -64,10 +74,15 @@
 from .response_params import (
 ResponseParams,
 )
+from .types import (
+ Type,
+)
 __all__ = (
 "CheckAndMutateRowRequest",
 "CheckAndMutateRowResponse",
+ "ExecuteQueryRequest",
+ "ExecuteQueryResponse",
 "GenerateInitialChangeStreamPartitionsRequest",
 "GenerateInitialChangeStreamPartitionsResponse",
 "MutateRowRequest",
@@ -85,12 +100,20 @@
 "ReadRowsResponse",
 "SampleRowKeysRequest",
 "SampleRowKeysResponse",
+ "ArrayValue",
 "Cell",
 "Column",
+ "ColumnMetadata",
 "ColumnRange",
 "Family",
 "Mutation",
+ "PartialResultSet",
+ "ProtoFormat",
+ "ProtoRows",
+ "ProtoRowsBatch",
+ "ProtoSchema",
 "ReadModifyWriteRule",
+ "ResultSetMetadata",
 "Row",
 "RowFilter",
 "RowRange",
@@ -107,4 +130,5 @@
 "RequestLatencyStats",
 "RequestStats",
 "ResponseParams",
+ "Type",
 )
diff --git a/google/cloud/bigtable_v2/types/bigtable.py b/google/cloud/bigtable_v2/types/bigtable.py
index fa6c566a2..3818decb6 100644
--- a/google/cloud/bigtable_v2/types/bigtable.py
+++ b/google/cloud/bigtable_v2/types/bigtable.py
@@ -49,6 +49,8 @@
 "GenerateInitialChangeStreamPartitionsResponse",
 "ReadChangeStreamRequest",
 "ReadChangeStreamResponse",
+ "ExecuteQueryRequest",
+ "ExecuteQueryResponse",
 },
 )
@@ -1258,4 +1260,125 @@ class CloseStream(proto.Message):
 )
+class ExecuteQueryRequest(proto.Message):
+ r"""Request message for Bigtable.ExecuteQuery
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ instance_name (str):
+ Required. The unique name of the instance against which the
+ query should be executed. Values are of the form
+ ``projects/<project>/instances/<instance>``
+ app_profile_id (str):
+ Optional. This value specifies routing for replication. If
+ not specified, the ``default`` application profile will be
+ used.
+ query (str):
+ Required. The query string.
+ proto_format (google.cloud.bigtable_v2.types.ProtoFormat):
+ Protocol buffer format as described by
+ ProtoSchema and ProtoRows messages.
+
+ This field is a member of `oneof`_ ``data_format``.
+ resume_token (bytes):
+ Optional. If this request is resuming a previously
+ interrupted query execution, ``resume_token`` should be
+ copied from the last PartialResultSet yielded before the
+ interruption. Doing this enables the query execution to
+ resume where the last one left off. The rest of the request
+ parameters must exactly match the request that yielded this
+ token. Otherwise the request will fail.
+ params (MutableMapping[str, google.cloud.bigtable_v2.types.Value]):
+ Required.
params contains string type keys and Bigtable type + values that bind to placeholders in the query string. In + query string, a parameter placeholder consists of the ``@`` + character followed by the parameter name (for example, + ``@firstName``) in the query string. + + For example, if + ``params["firstName"] = bytes_value: "foo" type {bytes_type {}}`` + then ``@firstName`` will be replaced with googlesql bytes + value "foo" in the query string during query evaluation. + + In case of Value.kind is not set, it will be set to + corresponding null value in googlesql. + ``params["firstName"] = type {string_type {}}`` then + ``@firstName`` will be replaced with googlesql null string. + + Value.type should always be set and no inference of type + will be made from Value.kind. If Value.type is not set, we + will return INVALID_ARGUMENT error. + """ + + instance_name: str = proto.Field( + proto.STRING, + number=1, + ) + app_profile_id: str = proto.Field( + proto.STRING, + number=2, + ) + query: str = proto.Field( + proto.STRING, + number=3, + ) + proto_format: data.ProtoFormat = proto.Field( + proto.MESSAGE, + number=4, + oneof="data_format", + message=data.ProtoFormat, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=8, + ) + params: MutableMapping[str, data.Value] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message=data.Value, + ) + + +class ExecuteQueryResponse(proto.Message): + r"""Response message for Bigtable.ExecuteQuery + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metadata (google.cloud.bigtable_v2.types.ResultSetMetadata): + Structure of rows in this response stream. + The first (and only the first) response streamed + from the server will be of this type. + + This field is a member of `oneof`_ ``response``. + results (google.cloud.bigtable_v2.types.PartialResultSet): + A partial result set with row data + potentially including additional instructions on + how recent past and future partial responses + should be interpreted. + + This field is a member of `oneof`_ ``response``. + """ + + metadata: data.ResultSetMetadata = proto.Field( + proto.MESSAGE, + number=1, + oneof="response", + message=data.ResultSetMetadata, + ) + results: data.PartialResultSet = proto.Field( + proto.MESSAGE, + number=2, + oneof="response", + message=data.PartialResultSet, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigtable_v2/types/data.py b/google/cloud/bigtable_v2/types/data.py index b2b853c64..ec32cac82 100644 --- a/google/cloud/bigtable_v2/types/data.py +++ b/google/cloud/bigtable_v2/types/data.py @@ -19,6 +19,10 @@ import proto # type: ignore +from google.cloud.bigtable_v2.types import types +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore + __protobuf__ = proto.module( package="google.bigtable.v2", @@ -28,6 +32,7 @@ "Column", "Cell", "Value", + "ArrayValue", "RowRange", "RowSet", "ColumnRange", @@ -39,6 +44,13 @@ "StreamPartition", "StreamContinuationTokens", "StreamContinuationToken", + "ProtoFormat", + "ColumnMetadata", + "ProtoSchema", + "ResultSetMetadata", + "ProtoRows", + "ProtoRowsBatch", + "PartialResultSet", }, ) @@ -179,6 +191,23 @@ class Value(proto.Message): .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
 Attributes:
+ type_ (google.cloud.bigtable_v2.types.Type):
+ The verified ``Type`` of this ``Value``, if it cannot be
+ inferred.
+
+ Read results will never specify the encoding for ``type``
+ since the value will already have been decoded by the
+ server. Furthermore, the ``type`` will be omitted entirely
+ if it can be inferred from a previous response. The exact
+ semantics for inferring ``type`` will vary, and are
+ therefore documented separately for each read method.
+
+ When using composite types (Struct, Array, Map) only the
+ outermost ``Value`` will specify the ``type``. This
+ top-level ``type`` will define the types for any nested
+ ``Struct`` fields, ``Array`` elements, or ``Map`` key/value
+ pairs. If a nested ``Value`` provides a ``type`` on write,
+ the request will be rejected with INVALID_ARGUMENT.
 raw_value (bytes):
 Represents a raw byte sequence with no type information.
 The ``type`` field must be omitted.
@@ -188,14 +217,58 @@
 Represents a raw cell timestamp with no type
 information. The ``type`` field must be omitted.
+ This field is a member of `oneof`_ ``kind``.
+ bytes_value (bytes):
+ Represents a typed value transported as a
+ byte sequence.
+
+ This field is a member of `oneof`_ ``kind``.
+ string_value (str):
+ Represents a typed value transported as a
+ string.
+
 This field is a member of `oneof`_ ``kind``.
 int_value (int):
- Represents a typed value transported as an integer. Default
- type for writes: ``Int64``
+ Represents a typed value transported as an
+ integer.
+
+ This field is a member of `oneof`_ ``kind``.
+ bool_value (bool):
+ Represents a typed value transported as a
+ boolean.
+
+ This field is a member of `oneof`_ ``kind``.
+ float_value (float):
+ Represents a typed value transported as a
+ floating point number.
+
+ This field is a member of `oneof`_ ``kind``.
+ timestamp_value (google.protobuf.timestamp_pb2.Timestamp):
+ Represents a typed value transported as a
+ timestamp.
+
+ This field is a member of `oneof`_ ``kind``.
+ date_value (google.type.date_pb2.Date):
+ Represents a typed value transported as a
+ date.
+
+ This field is a member of `oneof`_ ``kind``.
+ array_value (google.cloud.bigtable_v2.types.ArrayValue):
+ Represents a typed value transported as a sequence of
+ values. To differentiate between ``Struct``, ``Array``, and
+ ``Map``, the outermost ``Value`` must provide an explicit
+ ``type`` on write. This ``type`` will apply recursively to
+ the nested ``Struct`` fields, ``Array`` elements, or ``Map``
+ key/value pairs, which *must not* supply their own ``type``.
 This field is a member of `oneof`_ ``kind``.
""" + type_: types.Type = proto.Field( + proto.MESSAGE, + number=7, + message=types.Type, + ) raw_value: bytes = proto.Field( proto.BYTES, number=8, @@ -206,11 +279,64 @@ class Value(proto.Message): number=9, oneof="kind", ) + bytes_value: bytes = proto.Field( + proto.BYTES, + number=2, + oneof="kind", + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof="kind", + ) int_value: int = proto.Field( proto.INT64, number=6, oneof="kind", ) + bool_value: bool = proto.Field( + proto.BOOL, + number=10, + oneof="kind", + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=11, + oneof="kind", + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + oneof="kind", + message=timestamp_pb2.Timestamp, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=13, + oneof="kind", + message=date_pb2.Date, + ) + array_value: "ArrayValue" = proto.Field( + proto.MESSAGE, + number=4, + oneof="kind", + message="ArrayValue", + ) + + +class ArrayValue(proto.Message): + r"""``ArrayValue`` is an ordered list of ``Value``. + + Attributes: + values (MutableSequence[google.cloud.bigtable_v2.types.Value]): + The ordered elements in the array. + """ + + values: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) class RowRange(proto.Message): @@ -1199,4 +1325,166 @@ class StreamContinuationToken(proto.Message): ) +class ProtoFormat(proto.Message): + r"""Protocol buffers format descriptor, as described by Messages + ProtoSchema and ProtoRows + + """ + + +class ColumnMetadata(proto.Message): + r"""Describes a column in a Bigtable Query Language result set. + + Attributes: + name (str): + The name of the column. + type_ (google.cloud.bigtable_v2.types.Type): + The type of the column. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: types.Type = proto.Field( + proto.MESSAGE, + number=2, + message=types.Type, + ) + + +class ProtoSchema(proto.Message): + r"""ResultSet schema in proto format + + Attributes: + columns (MutableSequence[google.cloud.bigtable_v2.types.ColumnMetadata]): + The columns in the result set. + """ + + columns: MutableSequence["ColumnMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ColumnMetadata", + ) + + +class ResultSetMetadata(proto.Message): + r"""Describes the structure of a Bigtable result set. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + proto_schema (google.cloud.bigtable_v2.types.ProtoSchema): + Schema in proto format + + This field is a member of `oneof`_ ``schema``. + """ + + proto_schema: "ProtoSchema" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schema", + message="ProtoSchema", + ) + + +class ProtoRows(proto.Message): + r"""Rows represented in proto format. + + This should be constructed by concatenating the ``batch_data`` from + each of the relevant ``ProtoRowsBatch`` messages and parsing the + result as a ``ProtoRows`` message. + + Attributes: + values (MutableSequence[google.cloud.bigtable_v2.types.Value]): + A proto rows message consists of a list of values. Every N + complete values defines a row, where N is equal to the + number of entries in the ``metadata.proto_schema.columns`` + value received in the first response. + """ + + values: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Value", + ) + + +class ProtoRowsBatch(proto.Message): + r"""Batch of serialized ProtoRows. 
+
+ Attributes:
+ batch_data (bytes):
+ Merge partial results by concatenating these bytes, then
+ parsing the overall value as a ``ProtoRows`` message.
+ """
+
+ batch_data: bytes = proto.Field(
+ proto.BYTES,
+ number=1,
+ )
+
+
+class PartialResultSet(proto.Message):
+ r"""A partial result set from the streaming query API. The CBT client
+ will buffer partial_rows from result_sets until it gets a
+ resumption_token.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ proto_rows_batch (google.cloud.bigtable_v2.types.ProtoRowsBatch):
+ Partial rows in serialized ProtoRows format.
+
+ This field is a member of `oneof`_ ``partial_rows``.
+ resume_token (bytes):
+ An opaque token sent by the server to allow query resumption
+ and signal the client to accumulate ``partial_rows`` since
+ the last non-empty ``resume_token``. On resumption, the
+ resumed query will return the remaining rows for this query.
+
+ If there is a batch in progress, a non-empty
+ ``resume_token`` means that the batch of
+ ``partial_rows`` will be complete after merging the
+ ``partial_rows`` from this response. The client must only
+ yield completed batches to the application, and must ensure
+ that any future retries send the latest token to avoid
+ returning duplicate data.
+
+ The server may set 'resume_token' without a 'partial_rows'.
+ If there is a batch in progress the client should yield it.
+
+ The server will also send a sentinel ``resume_token`` when
+ the last batch of ``partial_rows`` is sent. If the client
+ retries the ExecuteQueryRequest with the sentinel
+ ``resume_token``, the server will emit it again without any
+ ``partial_rows``, then return OK.
+ estimated_batch_size (int):
+ Estimated size of a new batch. The server will always set
+ this when returning the first ``partial_rows`` of a batch,
+ and will not set it at any other time.
+
+ The client can use this estimate to allocate an initial
+ buffer for the batched results. This helps minimize the
+ number of allocations required, though the buffer size may
+ still need to be increased if the estimate is too low.
+ """
+
+ proto_rows_batch: "ProtoRowsBatch" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="partial_rows",
+ message="ProtoRowsBatch",
+ )
+ resume_token: bytes = proto.Field(
+ proto.BYTES,
+ number=5,
+ )
+ estimated_batch_size: int = proto.Field(
+ proto.INT32,
+ number=4,
+ )
+
+
 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/bigtable_v2/types/types.py b/google/cloud/bigtable_v2/types/types.py
new file mode 100644
index 000000000..8eb307b3e
--- /dev/null
+++ b/google/cloud/bigtable_v2/types/types.py
@@ -0,0 +1,561 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.bigtable.v2",
+ manifest={
+ "Type",
+ },
+)
+
+
+class Type(proto.Message):
+ r"""``Type`` represents the type of data that is written to, read from,
+ or stored in Bigtable. It is heavily based on the GoogleSQL standard
+ to help maintain familiarity and consistency across products and
+ features.
+
+ For compatibility with Bigtable's existing untyped APIs, each
+ ``Type`` includes an ``Encoding`` which describes how to convert
+ to/from the underlying data.
+
+ Each encoding also defines the following properties:
+
+ - Order-preserving: Does the encoded value sort consistently with
+ the original typed value? Note that Bigtable will always sort
+ data based on the raw encoded value, *not* the decoded type.
+
+ - Example: BYTES values sort in the same order as their raw
+ encodings.
+ - Counterexample: Encoding INT64 as a fixed-width decimal string
+ does *not* preserve sort order when dealing with negative
+ numbers. ``INT64(1) > INT64(-1)``, but
+ ``STRING("-00001") > STRING("00001")``.
+
+ - Self-delimiting: If we concatenate two encoded values, can we
+ always tell where the first one ends and the second one begins?
+
+ - Example: If we encode INT64s to fixed-width STRINGs, the first
+ value will always contain exactly N digits, possibly preceded
+ by a sign.
+ - Counterexample: If we concatenate two UTF-8 encoded STRINGs,
+ we have no way to tell where the first one ends.
+
+ - Compatibility: Which other systems have matching encoding
+ schemes? For example, does this encoding have a GoogleSQL
+ equivalent? HBase? Java?
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ bytes_type (google.cloud.bigtable_v2.types.Type.Bytes):
+ Bytes
+
+ This field is a member of `oneof`_ ``kind``.
+ string_type (google.cloud.bigtable_v2.types.Type.String):
+ String
+
+ This field is a member of `oneof`_ ``kind``.
+ int64_type (google.cloud.bigtable_v2.types.Type.Int64):
+ Int64
+
+ This field is a member of `oneof`_ ``kind``.
+ float32_type (google.cloud.bigtable_v2.types.Type.Float32):
+ Float32
+
+ This field is a member of `oneof`_ ``kind``.
+ float64_type (google.cloud.bigtable_v2.types.Type.Float64):
+ Float64
+
+ This field is a member of `oneof`_ ``kind``.
+ bool_type (google.cloud.bigtable_v2.types.Type.Bool):
+ Bool
+
+ This field is a member of `oneof`_ ``kind``.
+ timestamp_type (google.cloud.bigtable_v2.types.Type.Timestamp):
+ Timestamp
+
+ This field is a member of `oneof`_ ``kind``.
+ date_type (google.cloud.bigtable_v2.types.Type.Date):
+ Date
+
+ This field is a member of `oneof`_ ``kind``.
+ aggregate_type (google.cloud.bigtable_v2.types.Type.Aggregate):
+ Aggregate
+
+ This field is a member of `oneof`_ ``kind``.
+ struct_type (google.cloud.bigtable_v2.types.Type.Struct):
+ Struct
+
+ This field is a member of `oneof`_ ``kind``.
+ array_type (google.cloud.bigtable_v2.types.Type.Array):
+ Array
+
+ This field is a member of `oneof`_ ``kind``.
+ map_type (google.cloud.bigtable_v2.types.Type.Map):
+ Map
+
+ This field is a member of `oneof`_ ``kind``.
+ """ + + class Bytes(proto.Message): + r"""Bytes Values of type ``Bytes`` are stored in ``Value.bytes_value``. + + Attributes: + encoding (google.cloud.bigtable_v2.types.Type.Bytes.Encoding): + The encoding to use when converting to/from + lower level types. + """ + + class Encoding(proto.Message): + r"""Rules used to convert to/from lower level types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw (google.cloud.bigtable_v2.types.Type.Bytes.Encoding.Raw): + Use ``Raw`` encoding. + + This field is a member of `oneof`_ ``encoding``. + """ + + class Raw(proto.Message): + r"""Leaves the value "as-is" + + - Order-preserving? Yes + - Self-delimiting? No + - Compatibility? N/A + + """ + + raw: "Type.Bytes.Encoding.Raw" = proto.Field( + proto.MESSAGE, + number=1, + oneof="encoding", + message="Type.Bytes.Encoding.Raw", + ) + + encoding: "Type.Bytes.Encoding" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Bytes.Encoding", + ) + + class String(proto.Message): + r"""String Values of type ``String`` are stored in + ``Value.string_value``. + + Attributes: + encoding (google.cloud.bigtable_v2.types.Type.String.Encoding): + The encoding to use when converting to/from + lower level types. + """ + + class Encoding(proto.Message): + r"""Rules used to convert to/from lower level types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + utf8_bytes (google.cloud.bigtable_v2.types.Type.String.Encoding.Utf8Bytes): + Use ``Utf8Bytes`` encoding. + + This field is a member of `oneof`_ ``encoding``. + """ + + class Utf8Bytes(proto.Message): + r"""UTF-8 encoding + + - Order-preserving? Yes (code point order) + - Self-delimiting? No + - Compatibility? + + - BigQuery Federation ``TEXT`` encoding + - HBase ``Bytes.toBytes`` + - Java ``String#getBytes(StandardCharsets.UTF_8)`` + + """ + + utf8_bytes: "Type.String.Encoding.Utf8Bytes" = proto.Field( + proto.MESSAGE, + number=2, + oneof="encoding", + message="Type.String.Encoding.Utf8Bytes", + ) + + encoding: "Type.String.Encoding" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.String.Encoding", + ) + + class Int64(proto.Message): + r"""Int64 Values of type ``Int64`` are stored in ``Value.int_value``. + + Attributes: + encoding (google.cloud.bigtable_v2.types.Type.Int64.Encoding): + The encoding to use when converting to/from + lower level types. + """ + + class Encoding(proto.Message): + r"""Rules used to convert to/from lower level types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + big_endian_bytes (google.cloud.bigtable_v2.types.Type.Int64.Encoding.BigEndianBytes): + Use ``BigEndianBytes`` encoding. + + This field is a member of `oneof`_ ``encoding``. + """ + + class BigEndianBytes(proto.Message): + r"""Encodes the value as an 8-byte big endian twos complement ``Bytes`` + value. + + - Order-preserving? No (positive values only) + - Self-delimiting? Yes + - Compatibility? + + - BigQuery Federation ``BINARY`` encoding + - HBase ``Bytes.toBytes`` + - Java ``ByteBuffer.putLong()`` with ``ByteOrder.BIG_ENDIAN`` + + Attributes: + bytes_type (google.cloud.bigtable_v2.types.Type.Bytes): + Deprecated: ignored if set. 
+ """ + + bytes_type: "Type.Bytes" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Bytes", + ) + + big_endian_bytes: "Type.Int64.Encoding.BigEndianBytes" = proto.Field( + proto.MESSAGE, + number=1, + oneof="encoding", + message="Type.Int64.Encoding.BigEndianBytes", + ) + + encoding: "Type.Int64.Encoding" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Int64.Encoding", + ) + + class Bool(proto.Message): + r"""bool Values of type ``Bool`` are stored in ``Value.bool_value``.""" + + class Float32(proto.Message): + r"""Float32 Values of type ``Float32`` are stored in + ``Value.float_value``. + + """ + + class Float64(proto.Message): + r"""Float64 Values of type ``Float64`` are stored in + ``Value.float_value``. + + """ + + class Timestamp(proto.Message): + r"""Timestamp Values of type ``Timestamp`` are stored in + ``Value.timestamp_value``. + + """ + + class Date(proto.Message): + r"""Date Values of type ``Date`` are stored in ``Value.date_value``.""" + + class Struct(proto.Message): + r"""A structured data value, consisting of fields which map to + dynamically typed values. Values of type ``Struct`` are stored in + ``Value.array_value`` where entries are in the same order and number + as ``field_types``. + + Attributes: + fields (MutableSequence[google.cloud.bigtable_v2.types.Type.Struct.Field]): + The names and types of the fields in this + struct. + """ + + class Field(proto.Message): + r"""A struct field and its type. + + Attributes: + field_name (str): + The field name (optional). Fields without a ``field_name`` + are considered anonymous and cannot be referenced by name. + type_ (google.cloud.bigtable_v2.types.Type): + The type of values in this field. + """ + + field_name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "Type" = proto.Field( + proto.MESSAGE, + number=2, + message="Type", + ) + + fields: MutableSequence["Type.Struct.Field"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Type.Struct.Field", + ) + + class Array(proto.Message): + r"""An ordered list of elements of a given type. Values of type + ``Array`` are stored in ``Value.array_value``. + + Attributes: + element_type (google.cloud.bigtable_v2.types.Type): + The type of the elements in the array. This must not be + ``Array``. + """ + + element_type: "Type" = proto.Field( + proto.MESSAGE, + number=1, + message="Type", + ) + + class Map(proto.Message): + r"""A mapping of keys to values of a given type. Values of type ``Map`` + are stored in a ``Value.array_value`` where each entry is another + ``Value.array_value`` with two elements (the key and the value, in + that order). Normally encoded Map values won't have repeated keys, + however, clients are expected to handle the case in which they do. + If the same key appears multiple times, the *last* value takes + precedence. + + Attributes: + key_type (google.cloud.bigtable_v2.types.Type): + The type of a map key. Only ``Bytes``, ``String``, and + ``Int64`` are allowed as key types. + value_type (google.cloud.bigtable_v2.types.Type): + The type of the values in a map. + """ + + key_type: "Type" = proto.Field( + proto.MESSAGE, + number=1, + message="Type", + ) + value_type: "Type" = proto.Field( + proto.MESSAGE, + number=2, + message="Type", + ) + + class Aggregate(proto.Message): + r"""A value that combines incremental updates into a summarized value. + + Data is never directly written or read using type ``Aggregate``. 
+ + class Aggregate(proto.Message): + r"""A value that combines incremental updates into a summarized value. + + Data is never directly written or read using type ``Aggregate``. + Writes will provide either the ``input_type`` or ``state_type``, and + reads will always return the ``state_type``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + input_type (google.cloud.bigtable_v2.types.Type): + Type of the inputs that are accumulated by this + ``Aggregate``, which must specify a full encoding. Use + ``AddInput`` mutations to accumulate new inputs. + state_type (google.cloud.bigtable_v2.types.Type): + Output only. Type that holds the internal accumulator state + for the ``Aggregate``. This is a function of the + ``input_type`` and ``aggregator`` chosen, and will always + specify a full encoding. + sum (google.cloud.bigtable_v2.types.Type.Aggregate.Sum): + Sum aggregator. + + This field is a member of `oneof`_ ``aggregator``. + hllpp_unique_count (google.cloud.bigtable_v2.types.Type.Aggregate.HyperLogLogPlusPlusUniqueCount): + HyperLogLogPlusPlusUniqueCount aggregator. + + This field is a member of `oneof`_ ``aggregator``. + max_ (google.cloud.bigtable_v2.types.Type.Aggregate.Max): + Max aggregator. + + This field is a member of `oneof`_ ``aggregator``. + min_ (google.cloud.bigtable_v2.types.Type.Aggregate.Min): + Min aggregator. + + This field is a member of `oneof`_ ``aggregator``. + """ + + class Sum(proto.Message): + r"""Computes the sum of the input values. Allowed input: ``Int64``. + State: same as input. + + """ + + class Max(proto.Message): + r"""Computes the max of the input values. Allowed input: ``Int64``. + State: same as input. + + """ + + class Min(proto.Message): + r"""Computes the min of the input values. Allowed input: ``Int64``. + State: same as input. + + """ + + class HyperLogLogPlusPlusUniqueCount(proto.Message): + r"""Computes an approximate unique count over the input values. When + using raw data as input, be careful to use a consistent encoding. + Otherwise the same value encoded differently could count more than + once, or two distinct values could count as identical. + + Input: Any, or omit for Raw. State: TBD. Special state conversions: + ``Int64`` (the unique count estimate). + + """
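Putting the pieces together, a hedged sketch of the common "distributed counter" shape: a ``Sum`` aggregate over big-endian ``Int64`` inputs (field names are exactly those defined in this file; the use case is illustrative):

from google.cloud.bigtable_v2.types import types

# Sum aggregate over 8-byte big-endian Int64 inputs. Only input_type is
# supplied; state_type is output only and filled in by the service.
counter_type = types.Type(
    aggregate_type=types.Type.Aggregate(
        input_type=types.Type(
            int64_type=types.Type.Int64(
                encoding=types.Type.Int64.Encoding(
                    big_endian_bytes=types.Type.Int64.Encoding.BigEndianBytes()
                )
            )
        ),
        sum=types.Type.Aggregate.Sum(),
    )
)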
+ + input_type: "Type" = proto.Field( + proto.MESSAGE, + number=1, + message="Type", + ) + state_type: "Type" = proto.Field( + proto.MESSAGE, + number=2, + message="Type", + ) + sum: "Type.Aggregate.Sum" = proto.Field( + proto.MESSAGE, + number=4, + oneof="aggregator", + message="Type.Aggregate.Sum", + ) + hllpp_unique_count: "Type.Aggregate.HyperLogLogPlusPlusUniqueCount" = ( + proto.Field( + proto.MESSAGE, + number=5, + oneof="aggregator", + message="Type.Aggregate.HyperLogLogPlusPlusUniqueCount", + ) + ) + max_: "Type.Aggregate.Max" = proto.Field( + proto.MESSAGE, + number=6, + oneof="aggregator", + message="Type.Aggregate.Max", + ) + min_: "Type.Aggregate.Min" = proto.Field( + proto.MESSAGE, + number=7, + oneof="aggregator", + message="Type.Aggregate.Min", + ) + + bytes_type: Bytes = proto.Field( + proto.MESSAGE, + number=1, + oneof="kind", + message=Bytes, + ) + string_type: String = proto.Field( + proto.MESSAGE, + number=2, + oneof="kind", + message=String, + ) + int64_type: Int64 = proto.Field( + proto.MESSAGE, + number=5, + oneof="kind", + message=Int64, + ) + float32_type: Float32 = proto.Field( + proto.MESSAGE, + number=12, + oneof="kind", + message=Float32, + ) + float64_type: Float64 = proto.Field( + proto.MESSAGE, + number=9, + oneof="kind", + message=Float64, + ) + bool_type: Bool = proto.Field( + proto.MESSAGE, + number=8, + oneof="kind", + message=Bool, + ) + timestamp_type: Timestamp = proto.Field( + proto.MESSAGE, + number=10, + oneof="kind", + message=Timestamp, + ) + date_type: Date = proto.Field( + proto.MESSAGE, + number=11, + oneof="kind", + message=Date, + ) + aggregate_type: Aggregate = proto.Field( + proto.MESSAGE, + number=6, + oneof="kind", + message=Aggregate, + ) + struct_type: Struct = proto.Field( + proto.MESSAGE, + number=7, + oneof="kind", + message=Struct, + ) + array_type: Array = proto.Field( + proto.MESSAGE, + number=3, + oneof="kind", + message=Array, + ) + map_type: Map = proto.Field( + proto.MESSAGE, + number=4, + oneof="kind", + message=Map, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_bigtable_v2_keywords.py b/scripts/fixup_bigtable_v2_keywords.py index 3d1381c49..218a54902 100644 --- a/scripts/fixup_bigtable_v2_keywords.py +++ b/scripts/fixup_bigtable_v2_keywords.py @@ -40,6 +40,7 @@ class bigtableCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'check_and_mutate_row': ('row_key', 'table_name', 'authorized_view_name', 'app_profile_id', 'predicate_filter', 'true_mutations', 'false_mutations', ), + 'execute_query': ('instance_name', 'query', 'params', 'app_profile_id', 'proto_format', 'resume_token', ), 'generate_initial_change_stream_partitions': ('table_name', 'app_profile_id', ), 'mutate_row': ('row_key', 'mutations', 'table_name', 'authorized_view_name', 'app_profile_id', ), 'mutate_rows': ('entries', 'table_name', 'authorized_view_name', 'app_profile_id', ), diff --git a/setup.py b/setup.py index 8b698a35b..c47167487 100644 --- a/setup.py +++ b/setup.py @@ -39,10 +39,10 @@ dependencies = [ "google-api-core[grpc] >= 2.16.0, <3.0.0dev", "google-cloud-core >= 1.4.4, <3.0.0dev", + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", -
"protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index c684ca534..5a3f3e3fc 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,9 +6,10 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==2.16.0 +google-auth==2.14.1 google-cloud-core==2.0.0 grpc-google-iam-v1==0.12.4 -proto-plus==1.22.0 +proto-plus==1.22.3 libcst==0.2.5 -protobuf==3.19.5 +protobuf==3.20.2 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index d96846bb5..fa7c56db1 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -6,8 +6,9 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==2.16.0 +google-auth==2.14.1 google-cloud-core==2.0.0 grpc-google-iam-v1==0.12.4 -proto-plus==1.22.0 +proto-plus==1.22.3 libcst==0.2.5 -protobuf==3.19.5 +protobuf==3.20.2 diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py index e0de275cc..64fa98937 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py @@ -1380,12 +1380,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_instance ] = mock_object @@ -1793,12 +1788,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_instance ] = mock_object @@ -2173,12 +2163,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instances ] = mock_object @@ -2559,12 +2544,7 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_instance ] = mock_object @@ -2860,12 +2840,7 @@ async def test_partial_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.partial_update_instance ] = 
mock_object @@ -3244,12 +3219,7 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance ] = mock_object @@ -3606,12 +3576,7 @@ async def test_create_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_cluster ] = mock_object @@ -4009,12 +3974,7 @@ async def test_get_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_cluster ] = mock_object @@ -4389,12 +4349,7 @@ async def test_list_clusters_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_clusters ] = mock_object @@ -4762,12 +4717,7 @@ async def test_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_cluster ] = mock_object @@ -5058,12 +5008,7 @@ async def test_partial_update_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.partial_update_cluster ] = mock_object @@ -5442,12 +5387,7 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_cluster ] = mock_object @@ -5824,12 +5764,7 @@ async def test_create_app_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_app_profile ] = mock_object @@ -6231,12 +6166,7 @@ async def test_get_app_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
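These hunks all apply one mechanical refactor: the hand-rolled ``AwaitableMock`` subclass is dropped in favor of ``unittest.mock.AsyncMock``, which is already awaitable and tracks awaits on its own. A minimal standard-library sketch of why the subclass was redundant:

import asyncio
from unittest import mock

async def main():
    rpc = mock.AsyncMock()
    await rpc(request={})        # awaiting the mock records the call
    assert rpc.call_count == 1
    assert rpc.await_count == 1  # no custom __await__ bookkeeping needed

asyncio.run(main())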
client._client._transport._wrapped_methods[ client._client._transport.get_app_profile ] = mock_object @@ -6616,12 +6546,7 @@ async def test_list_app_profiles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_app_profiles ] = mock_object @@ -6873,13 +6798,13 @@ def test_list_app_profiles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_app_profiles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7203,12 +7128,7 @@ async def test_update_app_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_app_profile ] = mock_object @@ -7599,12 +7519,7 @@ async def test_delete_app_profile_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_app_profile ] = mock_object @@ -7973,12 +7888,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy ] = mock_object @@ -8360,12 +8270,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy ] = mock_object @@ -8757,12 +8662,7 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions ] = mock_object @@ -9170,12 +9070,7 @@ async def test_list_hot_tablets_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_hot_tablets ] = mock_object @@ -9413,13 +9308,13 @@ def test_list_hot_tablets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = 
tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_hot_tablets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py index 9676ce4fa..4c888da7c 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py @@ -1361,12 +1361,7 @@ async def test_create_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_table ] = mock_object @@ -1767,12 +1762,7 @@ async def test_create_table_from_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_table_from_snapshot ] = mock_object @@ -2170,12 +2160,7 @@ async def test_list_tables_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_tables ] = mock_object @@ -2412,13 +2397,13 @@ def test_list_tables_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tables(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2733,12 +2718,7 @@ async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_table ] = mock_object @@ -3097,12 +3077,7 @@ async def test_update_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_table ] = mock_object @@ -3471,12 +3446,7 @@ async def test_delete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_table ] = mock_object @@ -3831,12 +3801,7 
@@ async def test_undelete_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.undelete_table ] = mock_object @@ -4216,12 +4181,7 @@ async def test_create_authorized_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_authorized_view ] = mock_object @@ -4632,12 +4592,7 @@ async def test_list_authorized_views_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_authorized_views ] = mock_object @@ -4887,13 +4842,13 @@ def test_list_authorized_views_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_authorized_views(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5228,12 +5183,7 @@ async def test_get_authorized_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_authorized_view ] = mock_object @@ -5620,12 +5570,7 @@ async def test_update_authorized_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_authorized_view ] = mock_object @@ -6019,12 +5964,7 @@ async def test_delete_authorized_view_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_authorized_view ] = mock_object @@ -6409,12 +6349,7 @@ async def test_modify_column_families_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.modify_column_families ] = mock_object @@ -6812,12 +6747,7 @@ async def test_drop_row_range_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - 
self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.drop_row_range ] = mock_object @@ -7106,12 +7036,7 @@ async def test_generate_consistency_token_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.generate_consistency_token ] = mock_object @@ -7498,12 +7423,7 @@ async def test_check_consistency_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.check_consistency ] = mock_object @@ -7893,12 +7813,7 @@ async def test_snapshot_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.snapshot_table ] = mock_object @@ -8303,12 +8218,7 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot ] = mock_object @@ -8677,12 +8587,7 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots ] = mock_object @@ -8920,13 +8825,13 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9230,12 +9135,7 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot ] = mock_object @@ -9592,12 +9492,7 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_backup ] = mock_object @@ -9993,12 +9888,7 @@ async def 
test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_backup ] = mock_object @@ -10374,12 +10264,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_backup ] = mock_object @@ -10751,12 +10636,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup ] = mock_object @@ -11118,12 +10998,7 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_backups ] = mock_object @@ -11361,13 +11236,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -11681,12 +11556,7 @@ async def test_restore_table_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.restore_table ] = mock_object @@ -11973,12 +11843,7 @@ async def test_copy_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.copy_backup ] = mock_object @@ -12376,12 +12241,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy ] = mock_object @@ -12763,12 +12623,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy ] = mock_object @@ -13160,12 +13015,7 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions ] = mock_object diff --git a/tests/unit/gapic/bigtable_v2/test_bigtable.py b/tests/unit/gapic/bigtable_v2/test_bigtable.py index 4d8a6ec6b..348338d18 100644 --- a/tests/unit/gapic/bigtable_v2/test_bigtable.py +++ b/tests/unit/gapic/bigtable_v2/test_bigtable.py @@ -51,9 +51,11 @@ from google.cloud.bigtable_v2.types import bigtable from google.cloud.bigtable_v2.types import data from google.cloud.bigtable_v2.types import request_stats +from google.cloud.bigtable_v2.types import types from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore import google.auth @@ -1236,12 +1238,7 @@ async def test_read_rows_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.read_rows ] = mock_object @@ -1616,12 +1613,7 @@ async def test_sample_row_keys_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.sample_row_keys ] = mock_object @@ -1992,12 +1984,7 @@ async def test_mutate_row_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.mutate_row ] = mock_object @@ -2416,12 +2403,7 @@ async def test_mutate_rows_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.mutate_rows ] = mock_object @@ -2821,12 +2803,7 @@ async def test_check_and_mutate_row_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.check_and_mutate_row ] = mock_object @@ -3356,12 +3333,7 @@ async def test_ping_and_warm_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
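Beyond the mock cleanup, the bigtable_v2 hunks that follow add gRPC test coverage for the new ``ExecuteQuery`` RPC. For orientation, a hedged sketch of the calling pattern those tests exercise (it mirrors the mocked-transport usage in the tests; the literal values are placeholders):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.bigtable_v2 import BigtableClient
from google.cloud.bigtable_v2.types import bigtable

client = BigtableClient(credentials=ga_credentials.AnonymousCredentials())

# ExecuteQuery is server-streaming: the client returns an iterable of
# ExecuteQueryResponse messages.
with mock.patch.object(type(client.transport.execute_query), "__call__") as call:
    call.return_value = iter([bigtable.ExecuteQueryResponse()])
    stream = client.execute_query(
        instance_name="projects/sample1/instances/sample2",  # placeholder
        query="SELECT * FROM my_table",                      # placeholder
        app_profile_id="app_profile_id_value",
    )
    for message in stream:
        assert isinstance(message, bigtable.ExecuteQueryResponse)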
client._client._transport._wrapped_methods[ client._client._transport.ping_and_warm ] = mock_object @@ -3726,12 +3698,7 @@ async def test_read_modify_write_row_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.read_modify_write_row ] = mock_object @@ -4155,12 +4122,7 @@ async def test_generate_initial_change_stream_partitions_async_use_cached_wrappe ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.generate_initial_change_stream_partitions ] = mock_object @@ -4560,12 +4522,7 @@ async def test_read_change_stream_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.read_change_stream ] = mock_object @@ -4786,57 +4743,95 @@ async def test_read_change_stream_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - bigtable.ReadRowsRequest, + bigtable.ExecuteQueryRequest, dict, ], ) -def test_read_rows_rest(request_type): +def test_execute_query(request_type, transport: str = "grpc"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = bigtable.ReadRowsResponse( - last_scanned_row_key=b"last_scanned_row_key_blob", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([bigtable.ExecuteQueryResponse()]) + response = client.execute_query(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = bigtable.ReadRowsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable.ExecuteQueryRequest() + assert args[0] == request - json_return_value = "[{}]".format(json_return_value) + # Establish that the response is the type that we expect. 
+ for message in response: + assert isinstance(message, bigtable.ExecuteQueryResponse) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.read_rows(request) - assert isinstance(response, Iterable) - response = next(response) +def test_execute_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.ReadRowsResponse) - assert response.last_scanned_row_key == b"last_scanned_row_key_blob" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.execute_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ExecuteQueryRequest() -def test_read_rows_rest_use_cached_wrapped_rpc(): +def test_execute_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.ExecuteQueryRequest( + instance_name="instance_name_value", + app_profile_id="app_profile_id_value", + query="query_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.execute_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ExecuteQueryRequest( + instance_name="instance_name_value", + app_profile_id="app_profile_id_value", + query="query_value", + ) + + +def test_execute_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4844,184 +4839,279 @@ def test_read_rows_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.read_rows in client._transport._wrapped_methods + assert client._transport.execute_query in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.read_rows] = mock_rpc - + client._transport._wrapped_methods[client._transport.execute_query] = mock_rpc request = {} - client.read_rows(request) + client.execute_query(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.read_rows(request) + client.execute_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_read_rows_rest_interceptors(null_interceptor): - transport = transports.BigtableRestTransport( +@pytest.mark.asyncio +async def test_execute_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), + transport="grpc_asyncio", ) - client = BigtableClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_read_rows" - ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_read_rows" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = bigtable.ReadRowsRequest.pb(bigtable.ReadRowsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.ReadRowsResponse.to_json( - bigtable.ReadRowsResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.ExecuteQueryResponse()] ) - req.return_value._content = "[{}]".format(req.return_value._content) + response = await client.execute_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ExecuteQueryRequest() - request = bigtable.ReadRowsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = bigtable.ReadRowsResponse() - client.read_rows( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], +@pytest.mark.asyncio +async def test_execute_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - pre.assert_called_once() - post.assert_called_once() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + # Ensure method has been cached + assert ( + client._client._transport.execute_query + in client._client._transport._wrapped_methods + ) -def test_read_rows_rest_bad_request( - transport: str = "rest", request_type=bigtable.ReadRowsRequest -): - client = BigtableClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.execute_query + ] = mock_object - # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} - request = request_type(**request_init) + request = {} + await client.execute_query(request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.read_rows(request) + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + await client.execute_query(request) -def test_read_rows_rest_flattened(): - client = BigtableClient( + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_execute_query_async( + transport: str = "grpc_asyncio", request_type=bigtable.ExecuteQueryRequest +): + client = BigtableAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = bigtable.ReadRowsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "table_name": "projects/sample1/instances/sample2/tables/sample3" - } + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # get truthy value for each flattened field - mock_args = dict( - table_name="table_name_value", - app_profile_id="app_profile_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.ExecuteQueryResponse()] ) - mock_args.update(sample_request) + response = await client.execute_query(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = bigtable.ReadRowsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable.ExecuteQueryRequest() + assert args[0] == request - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - client.read_rows(**mock_args) + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, bigtable.ExecuteQueryResponse) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:readRows" - % client.transport._host, - args[1], - ) +@pytest.mark.asyncio +async def test_execute_query_async_from_dict(): + await test_execute_query_async(request_type=dict) -def test_read_rows_rest_flattened_error(transport: str = "rest"): + +def test_execute_query_routing_parameters(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.read_rows( - bigtable.ReadRowsRequest(), - table_name="table_name_value", + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.ExecuteQueryRequest( + **{"instance_name": "projects/sample1/instances/sample2"} + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + call.return_value = iter([bigtable.ExecuteQueryResponse()]) + client.execute_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.ExecuteQueryRequest(**{"app_profile_id": "sample1"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + call.return_value = iter([bigtable.ExecuteQueryResponse()]) + client.execute_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] + + +def test_execute_query_flattened(): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([bigtable.ExecuteQueryResponse()]) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.execute_query( + instance_name="instance_name_value", + query="query_value", app_profile_id="app_profile_id_value", ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance_name + mock_val = "instance_name_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + arg = args[0].app_profile_id + mock_val = "app_profile_id_value" + assert arg == mock_val + -def test_read_rows_rest_error(): +def test_execute_query_flattened_error(): client = BigtableClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.execute_query( + bigtable.ExecuteQueryRequest(), + instance_name="instance_name_value", + query="query_value", + app_profile_id="app_profile_id_value", + ) + + +@pytest.mark.asyncio +async def test_execute_query_flattened_async(): + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([bigtable.ExecuteQueryResponse()]) + + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.execute_query( + instance_name="instance_name_value", + query="query_value", + app_profile_id="app_profile_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance_name + mock_val = "instance_name_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + arg = args[0].app_profile_id + mock_val = "app_profile_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_execute_query_flattened_error_async(): + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.execute_query( + bigtable.ExecuteQueryRequest(), + instance_name="instance_name_value", + query="query_value", + app_profile_id="app_profile_id_value", + ) + @pytest.mark.parametrize( "request_type", [ - bigtable.SampleRowKeysRequest, + bigtable.ReadRowsRequest, dict, ], ) -def test_sample_row_keys_rest(request_type): +def test_read_rows_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5034,16 +5124,15 @@ def test_sample_row_keys_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.SampleRowKeysResponse( - row_key=b"row_key_blob", - offset_bytes=1293, + return_value = bigtable.ReadRowsResponse( + last_scanned_row_key=b"last_scanned_row_key_blob", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.SampleRowKeysResponse.pb(return_value) + return_value = bigtable.ReadRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -5052,55 +5141,598 @@ def test_sample_row_keys_rest(request_type): req.return_value = response_value with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.sample_row_keys(request) + response = client.read_rows(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, bigtable.ReadRowsResponse) + assert response.last_scanned_row_key == b"last_scanned_row_key_blob" + + +def test_read_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.read_rows in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.read_rows] = mock_rpc + + request = {} + client.read_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.read_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_read_rows_rest_interceptors(null_interceptor): + transport = transports.BigtableRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), + ) + client = BigtableClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BigtableRestInterceptor, "post_read_rows" + ) as post, mock.patch.object( + transports.BigtableRestInterceptor, "pre_read_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable.ReadRowsRequest.pb(bigtable.ReadRowsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = bigtable.ReadRowsResponse.to_json( + bigtable.ReadRowsResponse() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = bigtable.ReadRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = bigtable.ReadRowsResponse() + + client.read_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_read_rows_rest_bad_request( + transport: str = "rest", request_type=bigtable.ReadRowsRequest +): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.read_rows(request) + + +def test_read_rows_rest_flattened(): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = bigtable.ReadRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "table_name": "projects/sample1/instances/sample2/tables/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable.ReadRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.read_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{table_name=projects/*/instances/*/tables/*}:readRows" + % client.transport._host, + args[1], + ) + + +def test_read_rows_rest_flattened_error(transport: str = "rest"): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.read_rows( + bigtable.ReadRowsRequest(), + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_read_rows_rest_error(): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable.SampleRowKeysRequest, + dict, + ], +) +def test_sample_row_keys_rest(request_type): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = bigtable.SampleRowKeysResponse( + row_key=b"row_key_blob", + offset_bytes=1293, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable.SampleRowKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.sample_row_keys(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
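+    # (Illustrative aside, not part of the generated test: REST server
+    # streaming is faked above by wrapping the JSON message in a one-element
+    # array, "[{...}]", and feeding it through iter_content, so the client
+    # returns an iterator of messages, e.g.
+    #
+    #   for resp in client.sample_row_keys(request):
+    #       ...  # each resp is one streamed message
+    #
+    # hence the next(response) call above and the per-message checks below.)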
+ assert isinstance(response, bigtable.SampleRowKeysResponse) + assert response.row_key == b"row_key_blob" + assert response.offset_bytes == 1293 + + +def test_sample_row_keys_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sample_row_keys in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.sample_row_keys] = mock_rpc + + request = {} + client.sample_row_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.sample_row_keys(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sample_row_keys_rest_interceptors(null_interceptor): + transport = transports.BigtableRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), + ) + client = BigtableClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BigtableRestInterceptor, "post_sample_row_keys" + ) as post, mock.patch.object( + transports.BigtableRestInterceptor, "pre_sample_row_keys" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable.SampleRowKeysRequest.pb(bigtable.SampleRowKeysRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = bigtable.SampleRowKeysResponse.to_json( + bigtable.SampleRowKeysResponse() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = bigtable.SampleRowKeysRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = bigtable.SampleRowKeysResponse() + + client.sample_row_keys( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sample_row_keys_rest_bad_request( + transport: str = "rest", request_type=bigtable.SampleRowKeysRequest +): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
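+    # (Illustrative aside, not part of the generated test: the REST transport
+    # maps HTTP status codes onto google.api_core exceptions, so the mocked
+    # 400 below surfaces to callers as core_exceptions.BadRequest, e.g.
+    #
+    #   try:
+    #       client.sample_row_keys(request)
+    #   except core_exceptions.BadRequest:
+    #       ...  # handle the invalid request
+    # )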
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.sample_row_keys(request) + + +def test_sample_row_keys_rest_flattened(): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = bigtable.SampleRowKeysResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "table_name": "projects/sample1/instances/sample2/tables/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable.SampleRowKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.sample_row_keys(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + % client.transport._host, + args[1], + ) + + +def test_sample_row_keys_rest_flattened_error(transport: str = "rest"): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sample_row_keys( + bigtable.SampleRowKeysRequest(), + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_sample_row_keys_rest_error(): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable.MutateRowRequest, + dict, + ], +) +def test_mutate_row_rest(request_type): + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
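+        # (Illustrative aside, not part of the generated test: mutate_row is
+        # a unary RPC, so unlike the streaming methods above the faked body
+        # designated below is a single JSON object, with no "[{...}]" array
+        # wrapping and no iter_content mock, and client.mutate_row() returns
+        # the response message directly rather than an iterator.)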
+ return_value = bigtable.MutateRowResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable.MutateRowResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.mutate_row(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, bigtable.MutateRowResponse) + + +def test_mutate_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mutate_row in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mutate_row] = mock_rpc + + request = {} + client.mutate_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest): + transport_class = transports.BigtableRestTransport + + request_init = {} + request_init["row_key"] = b"" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).mutate_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["rowKey"] = b"row_key_blob" - assert isinstance(response, Iterable) - response = next(response) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).mutate_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, bigtable.SampleRowKeysResponse) - assert response.row_key == b"row_key_blob" - assert response.offset_bytes == 1293 + # verify required fields with non-default values are left alone + assert "rowKey" in jsonified_request + assert jsonified_request["rowKey"] == b"row_key_blob" + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) -def test_sample_row_keys_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BigtableClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Designate an appropriate value for the returned response. + return_value = bigtable.MutateRowResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + response_value = Response() + response_value.status_code = 200 - # Ensure method has been cached - assert client._transport.sample_row_keys in client._transport._wrapped_methods + # Convert return value to protobuf type + return_value = bigtable.MutateRowResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.sample_row_keys] = mock_rpc + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request = {} - client.sample_row_keys(request) + response = client.mutate_row(request) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - client.sample_row_keys(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +def test_mutate_row_rest_unset_required_fields(): + transport = transports.BigtableRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.mutate_row._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "rowKey", + "mutations", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sample_row_keys_rest_interceptors(null_interceptor): +def test_mutate_row_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -5111,13 +5743,13 @@ def test_sample_row_keys_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_sample_row_keys" + transports.BigtableRestInterceptor, "post_mutate_row" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_sample_row_keys" + transports.BigtableRestInterceptor, "pre_mutate_row" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.SampleRowKeysRequest.pb(bigtable.SampleRowKeysRequest()) + pb_message = bigtable.MutateRowRequest.pb(bigtable.MutateRowRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5128,20 +5760,19 @@ def test_sample_row_keys_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.SampleRowKeysResponse.to_json( - bigtable.SampleRowKeysResponse() + req.return_value._content = bigtable.MutateRowResponse.to_json( + bigtable.MutateRowResponse() ) - req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.SampleRowKeysRequest() + request = bigtable.MutateRowRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.SampleRowKeysResponse() + post.return_value = bigtable.MutateRowResponse() - client.sample_row_keys( + client.mutate_row( request, metadata=[ ("key", "val"), @@ -5153,8 +5784,8 @@ def test_sample_row_keys_rest_interceptors(null_interceptor): post.assert_called_once() -def test_sample_row_keys_rest_bad_request( - transport: str = "rest", request_type=bigtable.SampleRowKeysRequest +def test_mutate_row_rest_bad_request( + transport: str = "rest", request_type=bigtable.MutateRowRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5174,10 +5805,10 @@ def test_sample_row_keys_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.sample_row_keys(request) + client.mutate_row(request) -def test_sample_row_keys_rest_flattened(): +def test_mutate_row_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5186,7 +5817,7 @@ def test_sample_row_keys_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.SampleRowKeysResponse() + return_value = bigtable.MutateRowResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -5196,6 +5827,12 @@ def test_sample_row_keys_rest_flattened(): # get truthy value for each flattened field mock_args = dict( table_name="table_name_value", + row_key=b"row_key_blob", + mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -5204,28 +5841,25 @@ def test_sample_row_keys_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.SampleRowKeysResponse.pb(return_value) + return_value = bigtable.MutateRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - client.sample_row_keys(**mock_args) + client.mutate_row(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + "%s/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow" % client.transport._host, args[1], ) -def test_sample_row_keys_rest_flattened_error(transport: str = "rest"): +def test_mutate_row_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5234,14 +5868,20 @@ def test_sample_row_keys_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sample_row_keys( - bigtable.SampleRowKeysRequest(), + client.mutate_row( + bigtable.MutateRowRequest(), table_name="table_name_value", + row_key=b"row_key_blob", + mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], app_profile_id="app_profile_id_value", ) -def test_sample_row_keys_rest_error(): +def test_mutate_row_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5250,11 +5890,11 @@ def test_sample_row_keys_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.MutateRowRequest, + bigtable.MutateRowsRequest, dict, ], ) -def test_mutate_row_rest(request_type): +def test_mutate_rows_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5267,24 +5907,31 @@ def test_mutate_row_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = bigtable.MutateRowResponse() + return_value = bigtable.MutateRowsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowResponse.pb(return_value) + return_value = bigtable.MutateRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.mutate_row(request) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.mutate_rows(request) + + assert isinstance(response, Iterable) + response = next(response) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.MutateRowResponse) + assert isinstance(response, bigtable.MutateRowsResponse) -def test_mutate_row_rest_use_cached_wrapped_rpc(): +def test_mutate_rows_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5298,33 +5945,32 @@ def test_mutate_row_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.mutate_row in client._transport._wrapped_methods + assert client._transport.mutate_rows in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.mutate_row] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_rows] = mock_rpc request = {} - client.mutate_row(request) + client.mutate_rows(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.mutate_row(request) + client.mutate_rows(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest): +def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsRequest): transport_class = transports.BigtableRestTransport request_init = {} - request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5335,21 +5981,17 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).mutate_row._get_unset_required_fields(jsonified_request) + ).mutate_rows._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["rowKey"] = b"row_key_blob" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).mutate_row._get_unset_required_fields(jsonified_request) + ).mutate_rows._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "rowKey" in jsonified_request - assert jsonified_request["rowKey"] == b"row_key_blob" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5358,7 +6000,7 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.MutateRowResponse() + return_value = bigtable.MutateRowsResponse() # Mock the http request call within the method and fake a response. 
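+    # (Illustrative aside, not part of the generated test: the request is
+    # round-tripped through json_format.MessageToJson above, so the keys in
+    # jsonified_request are lowerCamelCase proto field names such as "rowKey"
+    # and "tableName", not the snake_case Python attributes, e.g.
+    #
+    #   pb = bigtable.MutateRowsRequest.pb(request)
+    #   json.loads(json_format.MessageToJson(pb))  # keys come back camelCased
+    #
+    # The mocked Session below then replays that transcoded request.)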
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5380,38 +6022,33 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowResponse.pb(return_value) + return_value = bigtable.MutateRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.mutate_row(request) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.mutate_rows(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_mutate_row_rest_unset_required_fields(): +def test_mutate_rows_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.mutate_row._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "rowKey", - "mutations", - ) - ) - ) + unset_fields = transport.mutate_rows._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("entries",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_mutate_row_rest_interceptors(null_interceptor): +def test_mutate_rows_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -5422,13 +6059,13 @@ def test_mutate_row_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_mutate_row" + transports.BigtableRestInterceptor, "post_mutate_rows" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_mutate_row" + transports.BigtableRestInterceptor, "pre_mutate_rows" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.MutateRowRequest.pb(bigtable.MutateRowRequest()) + pb_message = bigtable.MutateRowsRequest.pb(bigtable.MutateRowsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5439,19 +6076,20 @@ def test_mutate_row_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.MutateRowResponse.to_json( - bigtable.MutateRowResponse() + req.return_value._content = bigtable.MutateRowsResponse.to_json( + bigtable.MutateRowsResponse() ) + req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.MutateRowRequest() + request = bigtable.MutateRowsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.MutateRowResponse() + post.return_value = bigtable.MutateRowsResponse() - client.mutate_row( + client.mutate_rows( request, metadata=[ ("key", "val"), @@ -5463,8 +6101,8 @@ def test_mutate_row_rest_interceptors(null_interceptor): post.assert_called_once() -def test_mutate_row_rest_bad_request( - transport: str = "rest", request_type=bigtable.MutateRowRequest +def 
test_mutate_rows_rest_bad_request( + transport: str = "rest", request_type=bigtable.MutateRowsRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5484,10 +6122,10 @@ def test_mutate_row_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.mutate_row(request) + client.mutate_rows(request) -def test_mutate_row_rest_flattened(): +def test_mutate_rows_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5496,7 +6134,7 @@ def test_mutate_row_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.MutateRowResponse() + return_value = bigtable.MutateRowsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -5506,12 +6144,7 @@ def test_mutate_row_rest_flattened(): # get truthy value for each flattened field mock_args = dict( table_name="table_name_value", - row_key=b"row_key_blob", - mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], + entries=[bigtable.MutateRowsRequest.Entry(row_key=b"row_key_blob")], app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -5520,25 +6153,28 @@ def test_mutate_row_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowResponse.pb(return_value) + return_value = bigtable.MutateRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.mutate_row(**mock_args) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.mutate_rows(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow" + "%s/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows" % client.transport._host, args[1], ) -def test_mutate_row_rest_flattened_error(transport: str = "rest"): +def test_mutate_rows_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5547,20 +6183,15 @@ def test_mutate_row_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.mutate_row( - bigtable.MutateRowRequest(), + client.mutate_rows( + bigtable.MutateRowsRequest(), table_name="table_name_value", - row_key=b"row_key_blob", - mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], + entries=[bigtable.MutateRowsRequest.Entry(row_key=b"row_key_blob")], app_profile_id="app_profile_id_value", ) -def test_mutate_row_rest_error(): +def test_mutate_rows_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5569,11 +6200,11 @@ def test_mutate_row_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.MutateRowsRequest, + bigtable.CheckAndMutateRowRequest, dict, ], ) -def test_mutate_rows_rest(request_type): +def test_check_and_mutate_row_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5586,31 +6217,27 @@ def test_mutate_rows_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.MutateRowsResponse() + return_value = bigtable.CheckAndMutateRowResponse( + predicate_matched=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowsResponse.pb(return_value) + return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.mutate_rows(request) - - assert isinstance(response, Iterable) - response = next(response) + response = client.check_and_mutate_row(request) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.MutateRowsResponse) + assert isinstance(response, bigtable.CheckAndMutateRowResponse) + assert response.predicate_matched is True -def test_mutate_rows_rest_use_cached_wrapped_rpc(): +def test_check_and_mutate_row_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5624,32 +6251,39 @@ def test_mutate_rows_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.mutate_rows in client._transport._wrapped_methods + assert ( + client._transport.check_and_mutate_row in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.mutate_rows] = mock_rpc + client._transport._wrapped_methods[ + client._transport.check_and_mutate_row + ] = mock_rpc request = {} - client.mutate_rows(request) + client.check_and_mutate_row(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.mutate_rows(request) + client.check_and_mutate_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsRequest): +def test_check_and_mutate_row_rest_required_fields( + request_type=bigtable.CheckAndMutateRowRequest, +): transport_class = transports.BigtableRestTransport request_init = {} + request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5660,17 +6294,21 @@ def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).mutate_rows._get_unset_required_fields(jsonified_request) + ).check_and_mutate_row._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["rowKey"] = b"row_key_blob" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).mutate_rows._get_unset_required_fields(jsonified_request) + ).check_and_mutate_row._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "rowKey" in jsonified_request + assert jsonified_request["rowKey"] == b"row_key_blob" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5679,7 +6317,7 @@ def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.MutateRowsResponse() + return_value = bigtable.CheckAndMutateRowResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5701,33 +6339,30 @@ def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowsResponse.pb(return_value) + return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.mutate_rows(request) + response = client.check_and_mutate_row(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_mutate_rows_rest_unset_required_fields(): +def test_check_and_mutate_row_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.mutate_rows._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("entries",))) + unset_fields = transport.check_and_mutate_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("rowKey",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_mutate_rows_rest_interceptors(null_interceptor): +def test_check_and_mutate_row_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -5738,13 +6373,15 @@ def test_mutate_rows_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_mutate_rows" + transports.BigtableRestInterceptor, "post_check_and_mutate_row" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_mutate_rows" + transports.BigtableRestInterceptor, "pre_check_and_mutate_row" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.MutateRowsRequest.pb(bigtable.MutateRowsRequest()) + pb_message = bigtable.CheckAndMutateRowRequest.pb( + bigtable.CheckAndMutateRowRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5755,20 +6392,19 @@ def test_mutate_rows_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.MutateRowsResponse.to_json( - bigtable.MutateRowsResponse() + req.return_value._content = bigtable.CheckAndMutateRowResponse.to_json( + bigtable.CheckAndMutateRowResponse() ) - req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.MutateRowsRequest() + request = bigtable.CheckAndMutateRowRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.MutateRowsResponse() + post.return_value = bigtable.CheckAndMutateRowResponse() - client.mutate_rows( + client.check_and_mutate_row( request, metadata=[ ("key", "val"), @@ -5780,8 +6416,8 @@ def test_mutate_rows_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_mutate_rows_rest_bad_request( - transport: str = "rest", request_type=bigtable.MutateRowsRequest +def test_check_and_mutate_row_rest_bad_request( + transport: str = "rest", request_type=bigtable.CheckAndMutateRowRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5801,10 +6437,10 @@ def test_mutate_rows_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.mutate_rows(request) + client.check_and_mutate_row(request) -def test_mutate_rows_rest_flattened(): +def test_check_and_mutate_row_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5813,7 +6449,7 @@ def test_mutate_rows_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.MutateRowsResponse() + return_value = bigtable.CheckAndMutateRowResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -5823,7 +6459,28 @@ def test_mutate_rows_rest_flattened(): # get truthy value for each flattened field mock_args = dict( table_name="table_name_value", - entries=[bigtable.MutateRowsRequest.Entry(row_key=b"row_key_blob")], + row_key=b"row_key_blob", + predicate_filter=data.RowFilter( + chain=data.RowFilter.Chain( + filters=[ + data.RowFilter( + chain=data.RowFilter.Chain( + filters=[data.RowFilter(chain=None)] + ) + ) + ] + ) + ), + true_mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], + false_mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -5832,28 +6489,25 @@ def test_mutate_rows_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.MutateRowsResponse.pb(return_value) + return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - client.mutate_rows(**mock_args) + client.check_and_mutate_row(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows" + "%s/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow" % client.transport._host, args[1], ) -def test_mutate_rows_rest_flattened_error(transport: str = "rest"): +def test_check_and_mutate_row_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5862,15 +6516,36 @@ def test_mutate_rows_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
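+    # (Illustrative aside, not part of the generated test: mixing the two
+    # calling conventions is rejected client-side, so the ValueError below
+    # is raised before any HTTP request is made, e.g. this fails immediately:
+    #
+    #   client.check_and_mutate_row(
+    #       bigtable.CheckAndMutateRowRequest(), row_key=b"row_key_blob"
+    #   )
+    # )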
with pytest.raises(ValueError): - client.mutate_rows( - bigtable.MutateRowsRequest(), + client.check_and_mutate_row( + bigtable.CheckAndMutateRowRequest(), table_name="table_name_value", - entries=[bigtable.MutateRowsRequest.Entry(row_key=b"row_key_blob")], + row_key=b"row_key_blob", + predicate_filter=data.RowFilter( + chain=data.RowFilter.Chain( + filters=[ + data.RowFilter( + chain=data.RowFilter.Chain( + filters=[data.RowFilter(chain=None)] + ) + ) + ] + ) + ), + true_mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], + false_mutations=[ + data.Mutation( + set_cell=data.Mutation.SetCell(family_name="family_name_value") + ) + ], app_profile_id="app_profile_id_value", ) -def test_mutate_rows_rest_error(): +def test_check_and_mutate_row_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5879,44 +6554,41 @@ def test_mutate_rows_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.CheckAndMutateRowRequest, + bigtable.PingAndWarmRequest, dict, ], ) -def test_check_and_mutate_row_rest(request_type): +def test_ping_and_warm_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"name": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.CheckAndMutateRowResponse( - predicate_matched=True, - ) + return_value = bigtable.PingAndWarmResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) + return_value = bigtable.PingAndWarmResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.check_and_mutate_row(request) + response = client.ping_and_warm(request) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.CheckAndMutateRowResponse) - assert response.predicate_matched is True + assert isinstance(response, bigtable.PingAndWarmResponse) -def test_check_and_mutate_row_rest_use_cached_wrapped_rpc(): +def test_ping_and_warm_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5930,39 +6602,33 @@ def test_check_and_mutate_row_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.check_and_mutate_row in client._transport._wrapped_methods - ) + assert client._transport.ping_and_warm in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.check_and_mutate_row - ] = mock_rpc + client._transport._wrapped_methods[client._transport.ping_and_warm] = mock_rpc request = {} - client.check_and_mutate_row(request) + client.ping_and_warm(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.check_and_mutate_row(request) + client.ping_and_warm(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_check_and_mutate_row_rest_required_fields( - request_type=bigtable.CheckAndMutateRowRequest, -): +def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmRequest): transport_class = transports.BigtableRestTransport request_init = {} - request_init["row_key"] = b"" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5973,21 +6639,21 @@ def test_check_and_mutate_row_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).check_and_mutate_row._get_unset_required_fields(jsonified_request) + ).ping_and_warm._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["rowKey"] = b"row_key_blob" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).check_and_mutate_row._get_unset_required_fields(jsonified_request) + ).ping_and_warm._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "rowKey" in jsonified_request - assert jsonified_request["rowKey"] == b"row_key_blob" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5996,7 +6662,7 @@ def test_check_and_mutate_row_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.CheckAndMutateRowResponse() + return_value = bigtable.PingAndWarmResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6018,30 +6684,30 @@ def test_check_and_mutate_row_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) + return_value = bigtable.PingAndWarmResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.check_and_mutate_row(request) + response = client.ping_and_warm(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_check_and_mutate_row_rest_unset_required_fields(): +def test_ping_and_warm_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.check_and_mutate_row._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("rowKey",))) + unset_fields = transport.ping_and_warm._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_check_and_mutate_row_rest_interceptors(null_interceptor): +def test_ping_and_warm_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -6052,15 +6718,13 @@ def test_check_and_mutate_row_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_check_and_mutate_row" + transports.BigtableRestInterceptor, "post_ping_and_warm" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_check_and_mutate_row" + transports.BigtableRestInterceptor, "pre_ping_and_warm" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.CheckAndMutateRowRequest.pb( - bigtable.CheckAndMutateRowRequest() - ) + pb_message = bigtable.PingAndWarmRequest.pb(bigtable.PingAndWarmRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6071,19 +6735,19 @@ def test_check_and_mutate_row_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.CheckAndMutateRowResponse.to_json( - bigtable.CheckAndMutateRowResponse() + req.return_value._content = bigtable.PingAndWarmResponse.to_json( + bigtable.PingAndWarmResponse() ) - request = bigtable.CheckAndMutateRowRequest() + request = bigtable.PingAndWarmRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.CheckAndMutateRowResponse() + post.return_value = bigtable.PingAndWarmResponse() - client.check_and_mutate_row( + client.ping_and_warm( request, metadata=[ ("key", "val"), @@ -6095,8 +6759,8 @@ def test_check_and_mutate_row_rest_interceptors(null_interceptor): post.assert_called_once() -def test_check_and_mutate_row_rest_bad_request( - transport: str = "rest", request_type=bigtable.CheckAndMutateRowRequest +def test_ping_and_warm_rest_bad_request( + transport: str = "rest", request_type=bigtable.PingAndWarmRequest ): client = 
BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6104,7 +6768,7 @@ def test_check_and_mutate_row_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"name": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6116,10 +6780,10 @@ def test_check_and_mutate_row_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.check_and_mutate_row(request) + client.ping_and_warm(request) -def test_check_and_mutate_row_rest_flattened(): +def test_ping_and_warm_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6128,38 +6792,14 @@ def test_check_and_mutate_row_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.CheckAndMutateRowResponse() + return_value = bigtable.PingAndWarmResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "table_name": "projects/sample1/instances/sample2/tables/sample3" - } + sample_request = {"name": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - table_name="table_name_value", - row_key=b"row_key_blob", - predicate_filter=data.RowFilter( - chain=data.RowFilter.Chain( - filters=[ - data.RowFilter( - chain=data.RowFilter.Chain( - filters=[data.RowFilter(chain=None)] - ) - ) - ] - ) - ), - true_mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], - false_mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], + name="name_value", app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -6168,25 +6808,23 @@ def test_check_and_mutate_row_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.CheckAndMutateRowResponse.pb(return_value) + return_value = bigtable.PingAndWarmResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.check_and_mutate_row(**mock_args) + client.ping_and_warm(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow" - % client.transport._host, - args[1], + "%s/v2/{name=projects/*/instances/*}:ping" % client.transport._host, args[1] ) -def test_check_and_mutate_row_rest_flattened_error(transport: str = "rest"): +def test_ping_and_warm_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6195,36 +6833,14 @@ def test_check_and_mutate_row_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.check_and_mutate_row( - bigtable.CheckAndMutateRowRequest(), - table_name="table_name_value", - row_key=b"row_key_blob", - predicate_filter=data.RowFilter( - chain=data.RowFilter.Chain( - filters=[ - data.RowFilter( - chain=data.RowFilter.Chain( - filters=[data.RowFilter(chain=None)] - ) - ) - ] - ) - ), - true_mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], - false_mutations=[ - data.Mutation( - set_cell=data.Mutation.SetCell(family_name="family_name_value") - ) - ], + client.ping_and_warm( + bigtable.PingAndWarmRequest(), + name="name_value", app_profile_id="app_profile_id_value", ) -def test_check_and_mutate_row_rest_error(): +def test_ping_and_warm_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6233,41 +6849,41 @@ def test_check_and_mutate_row_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.PingAndWarmRequest, + bigtable.ReadModifyWriteRowRequest, dict, ], ) -def test_ping_and_warm_rest(request_type): +def test_read_modify_write_row_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2"} + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.PingAndWarmResponse() + return_value = bigtable.ReadModifyWriteRowResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.PingAndWarmResponse.pb(return_value) + return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ping_and_warm(request) + response = client.read_modify_write_row(request) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.PingAndWarmResponse) + assert isinstance(response, bigtable.ReadModifyWriteRowResponse) -def test_ping_and_warm_rest_use_cached_wrapped_rpc(): +def test_read_modify_write_row_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6281,33 +6897,40 @@ def test_ping_and_warm_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.ping_and_warm in client._transport._wrapped_methods + assert ( + client._transport.read_modify_write_row + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.ping_and_warm] = mock_rpc + client._transport._wrapped_methods[ + client._transport.read_modify_write_row + ] = mock_rpc request = {} - client.ping_and_warm(request) + client.read_modify_write_row(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.ping_and_warm(request) + client.read_modify_write_row(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmRequest): +def test_read_modify_write_row_rest_required_fields( + request_type=bigtable.ReadModifyWriteRowRequest, +): transport_class = transports.BigtableRestTransport request_init = {} - request_init["name"] = "" + request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6318,21 +6941,21 @@ def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ping_and_warm._get_unset_required_fields(jsonified_request) + ).read_modify_write_row._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["rowKey"] = b"row_key_blob" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).ping_and_warm._get_unset_required_fields(jsonified_request) + ).read_modify_write_row._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "rowKey" in jsonified_request + assert jsonified_request["rowKey"] == b"row_key_blob" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6341,7 +6964,7 @@ def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.PingAndWarmResponse() + return_value = bigtable.ReadModifyWriteRowResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6363,30 +6986,38 @@ def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.PingAndWarmResponse.pb(return_value) + return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.ping_and_warm(request) + response = client.read_modify_write_row(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_ping_and_warm_rest_unset_required_fields(): +def test_read_modify_write_row_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.ping_and_warm._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.read_modify_write_row._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "rowKey", + "rules", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_ping_and_warm_rest_interceptors(null_interceptor): +def test_read_modify_write_row_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -6397,13 +7028,15 @@ def test_ping_and_warm_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_ping_and_warm" + transports.BigtableRestInterceptor, "post_read_modify_write_row" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_ping_and_warm" + transports.BigtableRestInterceptor, "pre_read_modify_write_row" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.PingAndWarmRequest.pb(bigtable.PingAndWarmRequest()) + pb_message = bigtable.ReadModifyWriteRowRequest.pb( + bigtable.ReadModifyWriteRowRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6414,19 +7047,19 @@ def test_ping_and_warm_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.PingAndWarmResponse.to_json( - bigtable.PingAndWarmResponse() + req.return_value._content = bigtable.ReadModifyWriteRowResponse.to_json( + bigtable.ReadModifyWriteRowResponse() ) - request = bigtable.PingAndWarmRequest() + request = bigtable.ReadModifyWriteRowRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.PingAndWarmResponse() + post.return_value = bigtable.ReadModifyWriteRowResponse() - client.ping_and_warm( + client.read_modify_write_row( request, metadata=[ ("key", "val"), @@ -6438,8 +7071,8 @@ def test_ping_and_warm_rest_interceptors(null_interceptor): post.assert_called_once() -def test_ping_and_warm_rest_bad_request( - transport: str = "rest", request_type=bigtable.PingAndWarmRequest +def test_read_modify_write_row_rest_bad_request( + transport: str = "rest", 
request_type=bigtable.ReadModifyWriteRowRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6447,7 +7080,7 @@ def test_ping_and_warm_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2"} + request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6459,10 +7092,10 @@ def test_ping_and_warm_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.ping_and_warm(request) + client.read_modify_write_row(request) -def test_ping_and_warm_rest_flattened(): +def test_read_modify_write_row_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6471,14 +7104,18 @@ def test_ping_and_warm_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.PingAndWarmResponse() + return_value = bigtable.ReadModifyWriteRowResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2"} + sample_request = { + "table_name": "projects/sample1/instances/sample2/tables/sample3" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + table_name="table_name_value", + row_key=b"row_key_blob", + rules=[data.ReadModifyWriteRule(family_name="family_name_value")], app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -6487,23 +7124,25 @@ def test_ping_and_warm_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.PingAndWarmResponse.pb(return_value) + return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.ping_and_warm(**mock_args) + client.read_modify_write_row(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/instances/*}:ping" % client.transport._host, args[1] + "%s/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow" + % client.transport._host, + args[1], ) -def test_ping_and_warm_rest_flattened_error(transport: str = "rest"): +def test_read_modify_write_row_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6512,14 +7151,16 @@ def test_ping_and_warm_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.ping_and_warm( - bigtable.PingAndWarmRequest(), - name="name_value", + client.read_modify_write_row( + bigtable.ReadModifyWriteRowRequest(), + table_name="table_name_value", + row_key=b"row_key_blob", + rules=[data.ReadModifyWriteRule(family_name="family_name_value")], app_profile_id="app_profile_id_value", ) -def test_ping_and_warm_rest_error(): +def test_read_modify_write_row_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6528,11 +7169,11 @@ def test_ping_and_warm_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.ReadModifyWriteRowRequest, + bigtable.GenerateInitialChangeStreamPartitionsRequest, dict, ], ) -def test_read_modify_write_row_rest(request_type): +def test_generate_initial_change_stream_partitions_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6545,24 +7186,33 @@ def test_read_modify_write_row_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.ReadModifyWriteRowResponse() + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.read_modify_write_row(request) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.generate_initial_change_stream_partitions(request) + + assert isinstance(response, Iterable) + response = next(response) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.ReadModifyWriteRowResponse) + assert isinstance(response, bigtable.GenerateInitialChangeStreamPartitionsResponse) -def test_read_modify_write_row_rest_use_cached_wrapped_rpc(): +def test_generate_initial_change_stream_partitions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6577,7 +7227,7 @@ def test_read_modify_write_row_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.read_modify_write_row + client._transport.generate_initial_change_stream_partitions in client._transport._wrapped_methods ) @@ -6587,29 +7237,29 @@ def test_read_modify_write_row_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.read_modify_write_row + client._transport.generate_initial_change_stream_partitions ] = mock_rpc request = {} - client.read_modify_write_row(request) + client.generate_initial_change_stream_partitions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.read_modify_write_row(request) + client.generate_initial_change_stream_partitions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_read_modify_write_row_rest_required_fields( - request_type=bigtable.ReadModifyWriteRowRequest, +def test_generate_initial_change_stream_partitions_rest_required_fields( + request_type=bigtable.GenerateInitialChangeStreamPartitionsRequest, ): transport_class = transports.BigtableRestTransport request_init = {} - request_init["row_key"] = b"" + request_init["table_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6620,21 +7270,25 @@ def test_read_modify_write_row_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).read_modify_write_row._get_unset_required_fields(jsonified_request) + ).generate_initial_change_stream_partitions._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["rowKey"] = b"row_key_blob" + jsonified_request["tableName"] = "table_name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).read_modify_write_row._get_unset_required_fields(jsonified_request) + ).generate_initial_change_stream_partitions._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "rowKey" in jsonified_request - assert jsonified_request["rowKey"] == b"row_key_blob" + assert "tableName" in jsonified_request + assert jsonified_request["tableName"] == "table_name_value" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6643,7 +7297,7 @@ def test_read_modify_write_row_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.ReadModifyWriteRowResponse() + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6665,38 +7319,39 @@ def test_read_modify_write_row_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.read_modify_write_row(request) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.generate_initial_change_stream_partitions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_read_modify_write_row_rest_unset_required_fields(): +def test_generate_initial_change_stream_partitions_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.read_modify_write_row._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "rowKey", - "rules", - ) + unset_fields = ( + transport.generate_initial_change_stream_partitions._get_unset_required_fields( + {} ) ) + assert set(unset_fields) == (set(()) & set(("tableName",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_read_modify_write_row_rest_interceptors(null_interceptor): +def test_generate_initial_change_stream_partitions_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -6707,14 +7362,16 @@ def test_read_modify_write_row_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_read_modify_write_row" + transports.BigtableRestInterceptor, + "post_generate_initial_change_stream_partitions", ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_read_modify_write_row" + transports.BigtableRestInterceptor, + "pre_generate_initial_change_stream_partitions", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.ReadModifyWriteRowRequest.pb( - bigtable.ReadModifyWriteRowRequest() + pb_message = bigtable.GenerateInitialChangeStreamPartitionsRequest.pb( + bigtable.GenerateInitialChangeStreamPartitionsRequest() ) transcode.return_value = { "method": "post", @@ -6726,19 +7383,22 @@ def test_read_modify_write_row_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.ReadModifyWriteRowResponse.to_json( - bigtable.ReadModifyWriteRowResponse() + req.return_value._content = ( + bigtable.GenerateInitialChangeStreamPartitionsResponse.to_json( + bigtable.GenerateInitialChangeStreamPartitionsResponse() + ) ) + req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.ReadModifyWriteRowRequest() + request = bigtable.GenerateInitialChangeStreamPartitionsRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.ReadModifyWriteRowResponse() + post.return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() - client.read_modify_write_row( + client.generate_initial_change_stream_partitions( request, metadata=[ ("key", "val"), @@ -6750,8 +7410,9 @@ def test_read_modify_write_row_rest_interceptors(null_interceptor): post.assert_called_once() -def test_read_modify_write_row_rest_bad_request( - transport: str = "rest", request_type=bigtable.ReadModifyWriteRowRequest +def test_generate_initial_change_stream_partitions_rest_bad_request( + transport: str = "rest", + request_type=bigtable.GenerateInitialChangeStreamPartitionsRequest, ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6771,10 +7432,10 @@ def test_read_modify_write_row_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.read_modify_write_row(request) + client.generate_initial_change_stream_partitions(request) -def test_read_modify_write_row_rest_flattened(): +def test_generate_initial_change_stream_partitions_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6783,7 +7444,7 @@ def test_read_modify_write_row_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.ReadModifyWriteRowResponse() + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -6793,8 +7454,6 @@ def test_read_modify_write_row_rest_flattened(): # get truthy value for each flattened field mock_args = dict( table_name="table_name_value", - row_key=b"row_key_blob", - rules=[data.ReadModifyWriteRule(family_name="family_name_value")], app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -6803,25 +7462,32 @@ def test_read_modify_write_row_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadModifyWriteRowResponse.pb(return_value) + return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.read_modify_write_row(**mock_args) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.generate_initial_change_stream_partitions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow" + "%s/v2/{table_name=projects/*/instances/*/tables/*}:generateInitialChangeStreamPartitions" % client.transport._host, args[1], ) -def test_read_modify_write_row_rest_flattened_error(transport: str = "rest"): +def test_generate_initial_change_stream_partitions_rest_flattened_error( + transport: str = "rest", +): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6830,16 +7496,14 @@ def test_read_modify_write_row_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.read_modify_write_row( - bigtable.ReadModifyWriteRowRequest(), + client.generate_initial_change_stream_partitions( + bigtable.GenerateInitialChangeStreamPartitionsRequest(), table_name="table_name_value", - row_key=b"row_key_blob", - rules=[data.ReadModifyWriteRule(family_name="family_name_value")], app_profile_id="app_profile_id_value", ) -def test_read_modify_write_row_rest_error(): +def test_generate_initial_change_stream_partitions_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6848,11 +7512,11 @@ def test_read_modify_write_row_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.GenerateInitialChangeStreamPartitionsRequest, + bigtable.ReadChangeStreamRequest, dict, ], ) -def test_generate_initial_change_stream_partitions_rest(request_type): +def test_read_change_stream_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6865,15 +7529,13 @@ def test_generate_initial_change_stream_partitions_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() + return_value = bigtable.ReadChangeStreamResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( - return_value - ) + return_value = bigtable.ReadChangeStreamResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -6882,16 +7544,16 @@ def test_generate_initial_change_stream_partitions_rest(request_type): req.return_value = response_value with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.generate_initial_change_stream_partitions(request) + response = client.read_change_stream(request) assert isinstance(response, Iterable) response = next(response) # Establish that the response is the type that we expect. 
- assert isinstance(response, bigtable.GenerateInitialChangeStreamPartitionsResponse) + assert isinstance(response, bigtable.ReadChangeStreamResponse) -def test_generate_initial_change_stream_partitions_rest_use_cached_wrapped_rpc(): +def test_read_change_stream_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6906,8 +7568,7 @@ def test_generate_initial_change_stream_partitions_rest_use_cached_wrapped_rpc() # Ensure method has been cached assert ( - client._transport.generate_initial_change_stream_partitions - in client._transport._wrapped_methods + client._transport.read_change_stream in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -6916,24 +7577,24 @@ def test_generate_initial_change_stream_partitions_rest_use_cached_wrapped_rpc() "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.generate_initial_change_stream_partitions + client._transport.read_change_stream ] = mock_rpc request = {} - client.generate_initial_change_stream_partitions(request) + client.read_change_stream(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.generate_initial_change_stream_partitions(request) + client.read_change_stream(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_generate_initial_change_stream_partitions_rest_required_fields( - request_type=bigtable.GenerateInitialChangeStreamPartitionsRequest, +def test_read_change_stream_rest_required_fields( + request_type=bigtable.ReadChangeStreamRequest, ): transport_class = transports.BigtableRestTransport @@ -6949,9 +7610,7 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_initial_change_stream_partitions._get_unset_required_fields( - jsonified_request - ) + ).read_change_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6960,9 +7619,7 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_initial_change_stream_partitions._get_unset_required_fields( - jsonified_request - ) + ).read_change_stream._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6976,7 +7633,7 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() + return_value = bigtable.ReadChangeStreamResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6998,9 +7655,7 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( - return_value - ) + return_value = bigtable.ReadChangeStreamResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -7009,28 +7664,24 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.generate_initial_change_stream_partitions(request) + response = client.read_change_stream(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_generate_initial_change_stream_partitions_rest_unset_required_fields(): +def test_read_change_stream_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.generate_initial_change_stream_partitions._get_unset_required_fields( - {} - ) - ) + unset_fields = transport.read_change_stream._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("tableName",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_initial_change_stream_partitions_rest_interceptors(null_interceptor): +def test_read_change_stream_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -7041,16 +7692,14 @@ def test_generate_initial_change_stream_partitions_rest_interceptors(null_interc ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, - "post_generate_initial_change_stream_partitions", + transports.BigtableRestInterceptor, "post_read_change_stream" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, - "pre_generate_initial_change_stream_partitions", + transports.BigtableRestInterceptor, "pre_read_change_stream" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.GenerateInitialChangeStreamPartitionsRequest.pb( - bigtable.GenerateInitialChangeStreamPartitionsRequest() + pb_message = bigtable.ReadChangeStreamRequest.pb( + bigtable.ReadChangeStreamRequest() ) transcode.return_value = { "method": "post", @@ -7062,22 +7711,20 @@ def test_generate_initial_change_stream_partitions_rest_interceptors(null_interc req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - bigtable.GenerateInitialChangeStreamPartitionsResponse.to_json( - bigtable.GenerateInitialChangeStreamPartitionsResponse() - ) + req.return_value._content = bigtable.ReadChangeStreamResponse.to_json( + bigtable.ReadChangeStreamResponse() ) req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.GenerateInitialChangeStreamPartitionsRequest() + request = bigtable.ReadChangeStreamRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() + post.return_value = bigtable.ReadChangeStreamResponse() - client.generate_initial_change_stream_partitions( + client.read_change_stream( request, metadata=[ ("key", "val"), @@ -7089,9 +7736,8 @@ def test_generate_initial_change_stream_partitions_rest_interceptors(null_interc post.assert_called_once() -def test_generate_initial_change_stream_partitions_rest_bad_request( - transport: str = "rest", - request_type=bigtable.GenerateInitialChangeStreamPartitionsRequest, +def test_read_change_stream_rest_bad_request( + transport: str = "rest", request_type=bigtable.ReadChangeStreamRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7111,10 +7757,10 @@ def test_generate_initial_change_stream_partitions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.generate_initial_change_stream_partitions(request) + client.read_change_stream(request) -def test_generate_initial_change_stream_partitions_rest_flattened(): +def test_read_change_stream_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7123,7 +7769,7 @@ def test_generate_initial_change_stream_partitions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse() + return_value = bigtable.ReadChangeStreamResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -7141,9 +7787,7 @@ def test_generate_initial_change_stream_partitions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.GenerateInitialChangeStreamPartitionsResponse.pb( - return_value - ) + return_value = bigtable.ReadChangeStreamResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -7151,22 +7795,20 @@ def test_generate_initial_change_stream_partitions_rest_flattened(): with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - client.generate_initial_change_stream_partitions(**mock_args) + client.read_change_stream(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:generateInitialChangeStreamPartitions" + "%s/v2/{table_name=projects/*/instances/*/tables/*}:readChangeStream" % client.transport._host, args[1], ) -def test_generate_initial_change_stream_partitions_rest_flattened_error( - transport: str = "rest", -): +def test_read_change_stream_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7175,14 +7817,14 @@ def test_generate_initial_change_stream_partitions_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.generate_initial_change_stream_partitions( - bigtable.GenerateInitialChangeStreamPartitionsRequest(), + client.read_change_stream( + bigtable.ReadChangeStreamRequest(), table_name="table_name_value", app_profile_id="app_profile_id_value", ) -def test_generate_initial_change_stream_partitions_rest_error(): +def test_read_change_stream_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7191,30 +7833,30 @@ def test_generate_initial_change_stream_partitions_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable.ReadChangeStreamRequest, + bigtable.ExecuteQueryRequest, dict, ], ) -def test_read_change_stream_rest(request_type): +def test_execute_query_rest(request_type): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"instance_name": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.ReadChangeStreamResponse() + return_value = bigtable.ExecuteQueryResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadChangeStreamResponse.pb(return_value) + return_value = bigtable.ExecuteQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -7223,16 +7865,16 @@ def test_read_change_stream_rest(request_type): req.return_value = response_value with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.read_change_stream(request) + response = client.execute_query(request) assert isinstance(response, Iterable) response = next(response) # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.ReadChangeStreamResponse) + assert isinstance(response, bigtable.ExecuteQueryResponse) -def test_read_change_stream_rest_use_cached_wrapped_rpc(): +def test_execute_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7246,39 +7888,34 @@ def test_read_change_stream_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.read_change_stream in client._transport._wrapped_methods - ) + assert client._transport.execute_query in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_change_stream - ] = mock_rpc + client._transport._wrapped_methods[client._transport.execute_query] = mock_rpc request = {} - client.read_change_stream(request) + client.execute_query(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.read_change_stream(request) + client.execute_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_read_change_stream_rest_required_fields( - request_type=bigtable.ReadChangeStreamRequest, -): +def test_execute_query_rest_required_fields(request_type=bigtable.ExecuteQueryRequest): transport_class = transports.BigtableRestTransport request_init = {} - request_init["table_name"] = "" + request_init["instance_name"] = "" + request_init["query"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7289,21 +7926,24 @@ def test_read_change_stream_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).read_change_stream._get_unset_required_fields(jsonified_request) + ).execute_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["tableName"] = "table_name_value" + jsonified_request["instanceName"] = "instance_name_value" + jsonified_request["query"] = "query_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).read_change_stream._get_unset_required_fields(jsonified_request) + ).execute_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" + assert "instanceName" in jsonified_request + assert jsonified_request["instanceName"] == "instance_name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7312,7 +7952,7 @@ def test_read_change_stream_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable.ReadChangeStreamResponse() + return_value = bigtable.ExecuteQueryResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7334,7 +7974,7 @@ def test_read_change_stream_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadChangeStreamResponse.pb(return_value) + return_value = bigtable.ExecuteQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -7343,24 +7983,33 @@ def test_read_change_stream_rest_required_fields( with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.read_change_stream(request) + response = client.execute_query(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_read_change_stream_rest_unset_required_fields(): +def test_execute_query_rest_unset_required_fields(): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.read_change_stream._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("tableName",))) + unset_fields = transport.execute_query._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instanceName", + "query", + "params", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_read_change_stream_rest_interceptors(null_interceptor): +def test_execute_query_rest_interceptors(null_interceptor): transport = transports.BigtableRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BigtableRestInterceptor(), @@ -7371,15 +8020,13 @@ def test_read_change_stream_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableRestInterceptor, "post_read_change_stream" + transports.BigtableRestInterceptor, "post_execute_query" ) as post, mock.patch.object( - transports.BigtableRestInterceptor, "pre_read_change_stream" + transports.BigtableRestInterceptor, "pre_execute_query" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable.ReadChangeStreamRequest.pb( - bigtable.ReadChangeStreamRequest() - ) + pb_message = bigtable.ExecuteQueryRequest.pb(bigtable.ExecuteQueryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7390,20 +8037,20 @@ def test_read_change_stream_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable.ReadChangeStreamResponse.to_json( - bigtable.ReadChangeStreamResponse() + req.return_value._content = bigtable.ExecuteQueryResponse.to_json( + bigtable.ExecuteQueryResponse() ) req.return_value._content = "[{}]".format(req.return_value._content) - request = bigtable.ReadChangeStreamRequest() + request = bigtable.ExecuteQueryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable.ReadChangeStreamResponse() + post.return_value = bigtable.ExecuteQueryResponse() - client.read_change_stream( + client.execute_query( request, metadata=[ ("key", "val"), @@ -7415,8 +8062,8 @@ def test_read_change_stream_rest_interceptors(null_interceptor): post.assert_called_once() 
-def test_read_change_stream_rest_bad_request( - transport: str = "rest", request_type=bigtable.ReadChangeStreamRequest +def test_execute_query_rest_bad_request( + transport: str = "rest", request_type=bigtable.ExecuteQueryRequest ): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7424,7 +8071,7 @@ def test_read_change_stream_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"table_name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"instance_name": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7436,10 +8083,10 @@ def test_read_change_stream_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.read_change_stream(request) + client.execute_query(request) -def test_read_change_stream_rest_flattened(): +def test_execute_query_rest_flattened(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7448,16 +8095,15 @@ def test_read_change_stream_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable.ReadChangeStreamResponse() + return_value = bigtable.ExecuteQueryResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "table_name": "projects/sample1/instances/sample2/tables/sample3" - } + sample_request = {"instance_name": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - table_name="table_name_value", + instance_name="instance_name_value", + query="query_value", app_profile_id="app_profile_id_value", ) mock_args.update(sample_request) @@ -7466,7 +8112,7 @@ def test_read_change_stream_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = bigtable.ReadChangeStreamResponse.pb(return_value) + return_value = bigtable.ExecuteQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -7474,20 +8120,20 @@ def test_read_change_stream_rest_flattened(): with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - client.read_change_stream(**mock_args) + client.execute_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table_name=projects/*/instances/*/tables/*}:readChangeStream" + "%s/v2/{instance_name=projects/*/instances/*}:executeQuery" % client.transport._host, args[1], ) -def test_read_change_stream_rest_flattened_error(transport: str = "rest"): +def test_execute_query_rest_flattened_error(transport: str = "rest"): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7496,14 +8142,15 @@ def test_read_change_stream_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.read_change_stream( - bigtable.ReadChangeStreamRequest(), - table_name="table_name_value", + client.execute_query( + bigtable.ExecuteQueryRequest(), + instance_name="instance_name_value", + query="query_value", app_profile_id="app_profile_id_value", ) -def test_read_change_stream_rest_error(): +def test_execute_query_rest_error(): client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7657,6 +8304,7 @@ def test_bigtable_base_transport(): "read_modify_write_row", "generate_initial_change_stream_partitions", "read_change_stream", + "execute_query", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7967,6 +8615,9 @@ def test_bigtable_client_transport_session_collision(transport_name): session1 = client1.transport.read_change_stream._session session2 = client2.transport.read_change_stream._session assert session1 != session2 + session1 = client1.transport.execute_query._session + session2 = client2.transport.execute_query._session + assert session1 != session2 def test_bigtable_grpc_transport_channel(): From 0bddf11cdf9f9e6574b45fc142b7c37f0b57efc2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 22 Jul 2024 11:56:46 -0700 Subject: [PATCH 4/4] chore(main): release 2.25.0 (#1001) --- .github/sync-repo-settings.yaml | 1 - .release-please-manifest.json | 2 +- CHANGELOG.md | 13 +++++++++++++ google/cloud/bigtable/gapic_version.py | 2 +- google/cloud/bigtable_admin/gapic_version.py | 2 +- google/cloud/bigtable_admin_v2/gapic_version.py | 2 +- google/cloud/bigtable_v2/gapic_version.py | 2 +- 7 files changed, 18 insertions(+), 6 deletions(-) diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 1319e555d..df49eafcc 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -31,7 +31,6 @@ branchProtectionRules: - 'Kokoro' - 'Kokoro system-3.8' - 'cla/google' - - 'Conformance / Async v3 Client / Python 3.8 / Test Tag v0.0.2' - 'OwlBot Post Processor' # List of explicit permissions to add (additive only) permissionRules: diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 355b3955b..d6c7e9d68 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.24.0" + ".": "2.25.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index d82467b27..92b498748 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-bigtable/#history +## [2.25.0](https://github.com/googleapis/python-bigtable/compare/v2.24.0...v2.25.0) (2024-07-18) + + +### Features + +* Publish ProtoRows Message ([7ac8e14](https://github.com/googleapis/python-bigtable/commit/7ac8e142f99a6891b6bc286858f764def503e89a)) +* Publish the Cloud Bigtable ExecuteQuery API ([7ac8e14](https://github.com/googleapis/python-bigtable/commit/7ac8e142f99a6891b6bc286858f764def503e89a)) + + +### Bug Fixes + +* Allow protobuf 5.x ([7ac8e14](https://github.com/googleapis/python-bigtable/commit/7ac8e142f99a6891b6bc286858f764def503e89a)) + ## [2.24.0](https://github.com/googleapis/python-bigtable/compare/v2.23.1...v2.24.0) (2024-06-11) diff --git a/google/cloud/bigtable/gapic_version.py b/google/cloud/bigtable/gapic_version.py index 07de09d56..e5fa8f60b 100644 --- a/google/cloud/bigtable/gapic_version.py +++ b/google/cloud/bigtable/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the 
specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version} diff --git a/google/cloud/bigtable_admin/gapic_version.py b/google/cloud/bigtable_admin/gapic_version.py index 07de09d56..e5fa8f60b 100644 --- a/google/cloud/bigtable_admin/gapic_version.py +++ b/google/cloud/bigtable_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version} diff --git a/google/cloud/bigtable_admin_v2/gapic_version.py b/google/cloud/bigtable_admin_v2/gapic_version.py index 07de09d56..e5fa8f60b 100644 --- a/google/cloud/bigtable_admin_v2/gapic_version.py +++ b/google/cloud/bigtable_admin_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version} diff --git a/google/cloud/bigtable_v2/gapic_version.py b/google/cloud/bigtable_v2/gapic_version.py index 07de09d56..e5fa8f60b 100644 --- a/google/cloud/bigtable_v2/gapic_version.py +++ b/google/cloud/bigtable_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version}
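
Postscript (illustrative, not part of any patch above): the 2.25.0 release notes publish the new Cloud Bigtable ExecuteQuery API, and patch 3/4 adds the `execute_query` REST tests exercised against `BigtableClient`. A minimal sketch of driving that raw GAPIC surface, assuming Application Default Credentials are configured; the project/instance path and query string are placeholders, and decoding the streamed ProtoRows result payloads is out of scope here:

    from google.cloud import bigtable_v2

    # Low-level GAPIC client; picks up Application Default Credentials.
    client = bigtable_v2.BigtableClient()

    # instance_name and query are the required fields the tests assert on
    # (plus params for parameterized queries); values here are placeholders.
    request = bigtable_v2.ExecuteQueryRequest(
        instance_name="projects/my-project/instances/my-instance",
        query="SELECT * FROM my_table",
    )

    # execute_query is server-streaming: it yields ExecuteQueryResponse
    # messages, mirroring the Iterable assertions in the tests above.
    for response in client.execute_query(request):
        print(response)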